AaronWu901225 commited on
Commit
198d101
·
verified ·
1 Parent(s): 573aebe

Upload LoRA adapter folder

Browse files
This view is limited to 50 files because it contains too many changes. See raw diff
Files changed (50) hide show
  1. .gitattributes +12 -0
  2. .ipynb_checkpoints/plot_loss_from_trainer_state-checkpoint.py +160 -0
  3. README.md +209 -0
  4. adapter_config.json +39 -0
  5. adapter_model.safetensors +3 -0
  6. checkpoint-1026/README.md +209 -0
  7. checkpoint-1026/adapter_config.json +39 -0
  8. checkpoint-1026/adapter_model.safetensors +3 -0
  9. checkpoint-1026/optimizer.pt +3 -0
  10. checkpoint-1026/rng_state.pth +3 -0
  11. checkpoint-1026/scheduler.pt +3 -0
  12. checkpoint-1026/special_tokens_map.json +27 -0
  13. checkpoint-1026/tokenizer.json +3 -0
  14. checkpoint-1026/tokenizer_config.json +2070 -0
  15. checkpoint-1026/trainer_state.json +795 -0
  16. checkpoint-1026/training_args.bin +3 -0
  17. checkpoint-1197/README.md +209 -0
  18. checkpoint-1197/adapter_config.json +39 -0
  19. checkpoint-1197/adapter_model.safetensors +3 -0
  20. checkpoint-1197/optimizer.pt +3 -0
  21. checkpoint-1197/rng_state.pth +3 -0
  22. checkpoint-1197/scheduler.pt +3 -0
  23. checkpoint-1197/special_tokens_map.json +27 -0
  24. checkpoint-1197/tokenizer.json +3 -0
  25. checkpoint-1197/tokenizer_config.json +2070 -0
  26. checkpoint-1197/trainer_state.json +922 -0
  27. checkpoint-1197/training_args.bin +3 -0
  28. checkpoint-1368/README.md +209 -0
  29. checkpoint-1368/adapter_config.json +39 -0
  30. checkpoint-1368/adapter_model.safetensors +3 -0
  31. checkpoint-1368/optimizer.pt +3 -0
  32. checkpoint-1368/rng_state.pth +3 -0
  33. checkpoint-1368/scheduler.pt +3 -0
  34. checkpoint-1368/special_tokens_map.json +27 -0
  35. checkpoint-1368/tokenizer.json +3 -0
  36. checkpoint-1368/tokenizer_config.json +2070 -0
  37. checkpoint-1368/trainer_state.json +1049 -0
  38. checkpoint-1368/training_args.bin +3 -0
  39. checkpoint-1539/README.md +209 -0
  40. checkpoint-1539/adapter_config.json +39 -0
  41. checkpoint-1539/adapter_model.safetensors +3 -0
  42. checkpoint-1539/optimizer.pt +3 -0
  43. checkpoint-1539/rng_state.pth +3 -0
  44. checkpoint-1539/scheduler.pt +3 -0
  45. checkpoint-1539/special_tokens_map.json +27 -0
  46. checkpoint-1539/tokenizer.json +3 -0
  47. checkpoint-1539/tokenizer_config.json +2070 -0
  48. checkpoint-1539/trainer_state.json +1176 -0
  49. checkpoint-1539/training_args.bin +3 -0
  50. checkpoint-1542/README.md +209 -0
.gitattributes CHANGED
@@ -33,3 +33,15 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ checkpoint-1026/tokenizer.json filter=lfs diff=lfs merge=lfs -text
37
+ checkpoint-1197/tokenizer.json filter=lfs diff=lfs merge=lfs -text
38
+ checkpoint-1368/tokenizer.json filter=lfs diff=lfs merge=lfs -text
39
+ checkpoint-1539/tokenizer.json filter=lfs diff=lfs merge=lfs -text
40
+ checkpoint-1542/tokenizer.json filter=lfs diff=lfs merge=lfs -text
41
+ checkpoint-171/tokenizer.json filter=lfs diff=lfs merge=lfs -text
42
+ checkpoint-342/tokenizer.json filter=lfs diff=lfs merge=lfs -text
43
+ checkpoint-513/tokenizer.json filter=lfs diff=lfs merge=lfs -text
44
+ checkpoint-684/tokenizer.json filter=lfs diff=lfs merge=lfs -text
45
+ checkpoint-855/tokenizer.json filter=lfs diff=lfs merge=lfs -text
46
+ final/tokenizer.json filter=lfs diff=lfs merge=lfs -text
47
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
.ipynb_checkpoints/plot_loss_from_trainer_state-checkpoint.py ADDED
@@ -0,0 +1,160 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ Usage:
4
+ python plot_loss_from_trainer_state.py --input trainer_state.json --outdir ./plots \
5
+ --checkpoint_steps 263,526,789,1052
6
+
7
+ 功能:
8
+ - Curve: 黃橘色實線
9
+ - Grid: x,y 虛線
10
+ - Epoch markers: 藍色虛線 + EpochN 標籤(含最後一個 epoch)
11
+ - Checkpoints: 藍色小圓點(線性插值;超出範圍時使用端點值,並自動擴張 x 軸確保能看見)
12
+ """
13
+ import json, argparse
14
+ from pathlib import Path
15
+ import matplotlib.pyplot as plt
16
+ import numpy as np
17
+
18
+ YELLOW_ORANGE = "#d58f00"
19
+ BLUE = "#1f77b4"
20
+
21
def find_epoch_boundaries(log_items):
    """Collect ``(step, epoch_int)`` pairs marking epoch transitions.

    Walks the log entries in order and records the step at which the
    integer part of ``epoch`` changes (epoch numbers below 1 are
    skipped).  A marker for the final epoch is appended as well so the
    last vertical line gets drawn.  The result is sorted by step.
    """
    marks = []
    recorded = set()
    current_int = None
    final_step, final_epoch = None, None

    for entry in log_items:
        step_val = entry.get("step")
        epoch_val = entry.get("epoch")
        if step_val is None or epoch_val is None:
            # Entries without both fields carry no boundary information.
            continue
        final_step, final_epoch = step_val, epoch_val
        whole = int(epoch_val)
        if current_int is None:
            current_int = whole
            continue
        if whole != current_int:
            if whole >= 1 and (step_val, whole) not in recorded:
                marks.append((step_val, whole))
                recorded.add((step_val, whole))
            current_int = whole

    # Also add a marker for the very last epoch seen.
    if final_step is not None and final_epoch is not None:
        closing = int(float(final_epoch)) + 1
        if (final_step, closing) not in recorded:
            marks.append((final_step, closing))

    marks.sort(key=lambda pair: pair[0])
    return marks
49
+
50
def plot_series(x, y, xlabel, ylabel, title, outpath,
                epoch_marks=None, checkpoint_steps=None,
                color=YELLOW_ORANGE, linestyle='-'):
    """Plot one metric series and save it as a PNG.

    Draws ``y`` over ``x`` as a solid curve, overlays blue dots at the
    requested ``checkpoint_steps`` (linearly interpolated; clamped to
    the endpoint values when a step falls outside ``x``), and draws a
    blue dashed vertical line with an ``EpochN`` label for every entry
    in ``epoch_marks``.  The x axis is widened so every checkpoint and
    epoch marker stays visible, then the figure is written to
    ``outpath`` and closed.
    """
    fig = plt.figure(figsize=(10, 6))
    ax = fig.add_subplot(111)
    ax.plot(x, y, color=color, linestyle=linestyle, linewidth=2)

    # Blue checkpoint dots (interpolated; endpoint values outside range).
    dot_xs = []
    if checkpoint_steps:
        for ckpt in checkpoint_steps:
            dot_y = np.interp(ckpt, x, y, left=y[0], right=y[-1])
            ax.plot(ckpt, dot_y, marker='o', color=BLUE, markersize=6)
            dot_xs.append(ckpt)

    # Determine the x range, taking epoch markers into account, and add
    # right-hand padding so nothing sits exactly on the axis edge.
    xmin = 0
    candidates = [max(x)]
    if dot_xs:
        candidates.append(max(dot_xs))
    if epoch_marks:
        mark_steps = [mark_step for (mark_step, _) in epoch_marks]
        if mark_steps:
            candidates.append(max(mark_steps))

    xmax_base = max(candidates) if candidates else x[-1]

    # At least +1 step or 2% of the span on the right.
    span = max(xmax_base - xmin, 1.0)
    right_pad = max(1.0, 0.02 * span)
    ax.set_xlim(left=xmin, right=xmax_base + right_pad)

    # The y axis still starts at 0.
    ax.set_ylim(bottom=0)

    # Dashed grid on both axes.
    ax.grid(True, which='major', axis='both', linestyle='--', linewidth=0.8, alpha=0.6)

    # Epoch markers: blue dashed verticals with rotated labels.
    if epoch_marks:
        for step, ep in epoch_marks:
            ax.axvline(x=step, color=BLUE, linestyle='--', linewidth=1.2)
            ymax = ax.get_ylim()[1]
            ax.text(step, ymax * 0.98, f'Epoch{ep}', rotation=90,
                    va='top', ha='right', fontsize=8, color=BLUE)

    # Labels and cosmetics last, so set_xlim/set_ylim cannot disturb them.
    ax.set_xlabel(xlabel)
    ax.set_ylabel(ylabel)
    ax.set_title(title)
    ax.spines['left'].set_linewidth(2)
    ax.spines['bottom'].set_linewidth(2)
    ax.spines['right'].set_visible(False)
    ax.spines['top'].set_visible(False)

    fig.savefig(outpath, bbox_inches="tight")
    plt.close(fig)
104
+
105
+
106
def main():
    """Parse CLI args, read a trainer_state.json, and emit loss/LR plots.

    Reads the HF Trainer ``log_history`` (falling back to ``logs``),
    splits it into training-loss, eval-loss and learning-rate series,
    and saves one PNG per non-empty series into ``--outdir``.
    """
    ap = argparse.ArgumentParser()
    ap.add_argument("--input", required=True, help="Path to trainer_state.json")
    ap.add_argument("--outdir", default="./plots", help="Directory to save PNGs")
    ap.add_argument("--no_epoch_marks", action="store_true", help="Disable vertical epoch markers")
    ap.add_argument("--checkpoint_steps", default="", help="Comma-separated steps (e.g., 100,200,500)")
    args = ap.parse_args()

    src = Path(args.input)
    with open(src, "r", encoding="utf-8") as f:
        state = json.load(f)

    # Trainer states keep per-step metrics under "log_history"; fall
    # back to "logs" for older/alternate formats.
    log = state.get("log_history", state.get("logs", []))

    steps, train_losses = [], []
    eval_steps, eval_losses = [], []
    lr_steps, lrs = [], []

    for item in log:
        step = item.get("step")
        if step is None:
            continue
        if "loss" in item:
            steps.append(step); train_losses.append(item["loss"])
        if "eval_loss" in item:
            eval_steps.append(step); eval_losses.append(item["eval_loss"])
        if "learning_rate" in item:
            lr_steps.append(step); lrs.append(item["learning_rate"])

    outdir = Path(args.outdir); outdir.mkdir(parents=True, exist_ok=True)

    epoch_marks = None if args.no_epoch_marks else find_epoch_boundaries(log)
    # Tolerate blanks and mixed separators.  BUG FIX: the original
    # called .replace(",", ",") — a no-op; the intent (per the original
    # comment) was to normalize the full-width comma to ",".
    raw = [s.strip() for s in args.checkpoint_steps.replace("，", ",").split(",") if s.strip()]
    checkpoint_steps = []
    for s in raw:
        try:
            checkpoint_steps.append(int(float(s)))
        except ValueError:
            # Skip non-numeric tokens instead of aborting.  (Was a bare
            # `except:`, which also swallowed KeyboardInterrupt etc.)
            pass

    if steps and train_losses:
        plot_series(steps, train_losses, "Step", "Training Loss", "Training Loss vs Step",
                    outdir / "loss_curve.png", epoch_marks=epoch_marks, checkpoint_steps=checkpoint_steps)
    if eval_steps and eval_losses:
        plot_series(eval_steps, eval_losses, "Step", "Eval Loss", "Eval Loss vs Step",
                    outdir / "eval_loss_curve.png", epoch_marks=epoch_marks, checkpoint_steps=checkpoint_steps)
    if lr_steps and lrs:
        plot_series(lr_steps, lrs, "Step", "Learning Rate", "Learning Rate vs Step",
                    outdir / "lr_curve.png", epoch_marks=epoch_marks, checkpoint_steps=checkpoint_steps)

    print(f"Saved plots to: {outdir.resolve()}")

if __name__ == "__main__":
    main()
README.md ADDED
@@ -0,0 +1,209 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ base_model: Salesforce/Llama-xLAM-2-8b-fc-r
3
+ library_name: peft
4
+ pipeline_tag: text-generation
5
+ tags:
6
+ - base_model:adapter:Salesforce/Llama-xLAM-2-8b-fc-r
7
+ - lora
8
+ - sft
9
+ - transformers
10
+ - trl
11
+ ---
12
+
13
+ # Model Card for Model ID
14
+
15
+ <!-- Provide a quick summary of what the model is/does. -->
16
+
17
+
18
+
19
+ ## Model Details
20
+
21
+ ### Model Description
22
+
23
+ <!-- Provide a longer summary of what this model is. -->
24
+
25
+
26
+
27
+ - **Developed by:** [More Information Needed]
28
+ - **Funded by [optional]:** [More Information Needed]
29
+ - **Shared by [optional]:** [More Information Needed]
30
+ - **Model type:** [More Information Needed]
31
+ - **Language(s) (NLP):** [More Information Needed]
32
+ - **License:** [More Information Needed]
33
+ - **Finetuned from model [optional]:** [More Information Needed]
34
+
35
+ ### Model Sources [optional]
36
+
37
+ <!-- Provide the basic links for the model. -->
38
+
39
+ - **Repository:** [More Information Needed]
40
+ - **Paper [optional]:** [More Information Needed]
41
+ - **Demo [optional]:** [More Information Needed]
42
+
43
+ ## Uses
44
+
45
+ <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->
46
+
47
+ ### Direct Use
48
+
49
+ <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->
50
+
51
+ [More Information Needed]
52
+
53
+ ### Downstream Use [optional]
54
+
55
+ <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->
56
+
57
+ [More Information Needed]
58
+
59
+ ### Out-of-Scope Use
60
+
61
+ <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->
62
+
63
+ [More Information Needed]
64
+
65
+ ## Bias, Risks, and Limitations
66
+
67
+ <!-- This section is meant to convey both technical and sociotechnical limitations. -->
68
+
69
+ [More Information Needed]
70
+
71
+ ### Recommendations
72
+
73
+ <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
74
+
75
+ Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
76
+
77
+ ## How to Get Started with the Model
78
+
79
+ Use the code below to get started with the model.
80
+
81
+ [More Information Needed]
82
+
83
+ ## Training Details
84
+
85
+ ### Training Data
86
+
87
+ <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->
88
+
89
+ [More Information Needed]
90
+
91
+ ### Training Procedure
92
+
93
+ <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->
94
+
95
+ #### Preprocessing [optional]
96
+
97
+ [More Information Needed]
98
+
99
+
100
+ #### Training Hyperparameters
101
+
102
+ - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->
103
+
104
+ #### Speeds, Sizes, Times [optional]
105
+
106
+ <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->
107
+
108
+ [More Information Needed]
109
+
110
+ ## Evaluation
111
+
112
+ <!-- This section describes the evaluation protocols and provides the results. -->
113
+
114
+ ### Testing Data, Factors & Metrics
115
+
116
+ #### Testing Data
117
+
118
+ <!-- This should link to a Dataset Card if possible. -->
119
+
120
+ [More Information Needed]
121
+
122
+ #### Factors
123
+
124
+ <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->
125
+
126
+ [More Information Needed]
127
+
128
+ #### Metrics
129
+
130
+ <!-- These are the evaluation metrics being used, ideally with a description of why. -->
131
+
132
+ [More Information Needed]
133
+
134
+ ### Results
135
+
136
+ [More Information Needed]
137
+
138
+ #### Summary
139
+
140
+
141
+
142
+ ## Model Examination [optional]
143
+
144
+ <!-- Relevant interpretability work for the model goes here -->
145
+
146
+ [More Information Needed]
147
+
148
+ ## Environmental Impact
149
+
150
+ <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->
151
+
152
+ Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
153
+
154
+ - **Hardware Type:** [More Information Needed]
155
+ - **Hours used:** [More Information Needed]
156
+ - **Cloud Provider:** [More Information Needed]
157
+ - **Compute Region:** [More Information Needed]
158
+ - **Carbon Emitted:** [More Information Needed]
159
+
160
+ ## Technical Specifications [optional]
161
+
162
+ ### Model Architecture and Objective
163
+
164
+ [More Information Needed]
165
+
166
+ ### Compute Infrastructure
167
+
168
+ [More Information Needed]
169
+
170
+ #### Hardware
171
+
172
+ [More Information Needed]
173
+
174
+ #### Software
175
+
176
+ [More Information Needed]
177
+
178
+ ## Citation [optional]
179
+
180
+ <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->
181
+
182
+ **BibTeX:**
183
+
184
+ [More Information Needed]
185
+
186
+ **APA:**
187
+
188
+ [More Information Needed]
189
+
190
+ ## Glossary [optional]
191
+
192
+ <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->
193
+
194
+ [More Information Needed]
195
+
196
+ ## More Information [optional]
197
+
198
+ [More Information Needed]
199
+
200
+ ## Model Card Authors [optional]
201
+
202
+ [More Information Needed]
203
+
204
+ ## Model Card Contact
205
+
206
+ [More Information Needed]
207
+ ### Framework versions
208
+
209
+ - PEFT 0.17.1
adapter_config.json ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "alpha_pattern": {},
3
+ "auto_mapping": null,
4
+ "base_model_name_or_path": "Salesforce/Llama-xLAM-2-8b-fc-r",
5
+ "bias": "none",
6
+ "corda_config": null,
7
+ "eva_config": null,
8
+ "exclude_modules": null,
9
+ "fan_in_fan_out": false,
10
+ "inference_mode": true,
11
+ "init_lora_weights": true,
12
+ "layer_replication": null,
13
+ "layers_pattern": null,
14
+ "layers_to_transform": null,
15
+ "loftq_config": {},
16
+ "lora_alpha": 32,
17
+ "lora_bias": false,
18
+ "lora_dropout": 0.05,
19
+ "megatron_config": null,
20
+ "megatron_core": "megatron.core",
21
+ "modules_to_save": null,
22
+ "peft_type": "LORA",
23
+ "qalora_group_size": 16,
24
+ "r": 16,
25
+ "rank_pattern": {},
26
+ "revision": null,
27
+ "target_modules": [
28
+ "o_proj",
29
+ "k_proj",
30
+ "q_proj",
31
+ "v_proj"
32
+ ],
33
+ "target_parameters": null,
34
+ "task_type": "CAUSAL_LM",
35
+ "trainable_token_indices": null,
36
+ "use_dora": false,
37
+ "use_qalora": false,
38
+ "use_rslora": false
39
+ }
adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c3ded87b859177c17c5908e9bf7dbdd329d029b13e9b2d280936acb506b04ba3
3
+ size 54560368
checkpoint-1026/README.md ADDED
@@ -0,0 +1,209 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ base_model: Salesforce/Llama-xLAM-2-8b-fc-r
3
+ library_name: peft
4
+ pipeline_tag: text-generation
5
+ tags:
6
+ - base_model:adapter:Salesforce/Llama-xLAM-2-8b-fc-r
7
+ - lora
8
+ - sft
9
+ - transformers
10
+ - trl
11
+ ---
12
+
13
+ # Model Card for Model ID
14
+
15
+ <!-- Provide a quick summary of what the model is/does. -->
16
+
17
+
18
+
19
+ ## Model Details
20
+
21
+ ### Model Description
22
+
23
+ <!-- Provide a longer summary of what this model is. -->
24
+
25
+
26
+
27
+ - **Developed by:** [More Information Needed]
28
+ - **Funded by [optional]:** [More Information Needed]
29
+ - **Shared by [optional]:** [More Information Needed]
30
+ - **Model type:** [More Information Needed]
31
+ - **Language(s) (NLP):** [More Information Needed]
32
+ - **License:** [More Information Needed]
33
+ - **Finetuned from model [optional]:** [More Information Needed]
34
+
35
+ ### Model Sources [optional]
36
+
37
+ <!-- Provide the basic links for the model. -->
38
+
39
+ - **Repository:** [More Information Needed]
40
+ - **Paper [optional]:** [More Information Needed]
41
+ - **Demo [optional]:** [More Information Needed]
42
+
43
+ ## Uses
44
+
45
+ <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->
46
+
47
+ ### Direct Use
48
+
49
+ <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->
50
+
51
+ [More Information Needed]
52
+
53
+ ### Downstream Use [optional]
54
+
55
+ <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->
56
+
57
+ [More Information Needed]
58
+
59
+ ### Out-of-Scope Use
60
+
61
+ <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->
62
+
63
+ [More Information Needed]
64
+
65
+ ## Bias, Risks, and Limitations
66
+
67
+ <!-- This section is meant to convey both technical and sociotechnical limitations. -->
68
+
69
+ [More Information Needed]
70
+
71
+ ### Recommendations
72
+
73
+ <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
74
+
75
+ Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
76
+
77
+ ## How to Get Started with the Model
78
+
79
+ Use the code below to get started with the model.
80
+
81
+ [More Information Needed]
82
+
83
+ ## Training Details
84
+
85
+ ### Training Data
86
+
87
+ <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->
88
+
89
+ [More Information Needed]
90
+
91
+ ### Training Procedure
92
+
93
+ <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->
94
+
95
+ #### Preprocessing [optional]
96
+
97
+ [More Information Needed]
98
+
99
+
100
+ #### Training Hyperparameters
101
+
102
+ - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->
103
+
104
+ #### Speeds, Sizes, Times [optional]
105
+
106
+ <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->
107
+
108
+ [More Information Needed]
109
+
110
+ ## Evaluation
111
+
112
+ <!-- This section describes the evaluation protocols and provides the results. -->
113
+
114
+ ### Testing Data, Factors & Metrics
115
+
116
+ #### Testing Data
117
+
118
+ <!-- This should link to a Dataset Card if possible. -->
119
+
120
+ [More Information Needed]
121
+
122
+ #### Factors
123
+
124
+ <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->
125
+
126
+ [More Information Needed]
127
+
128
+ #### Metrics
129
+
130
+ <!-- These are the evaluation metrics being used, ideally with a description of why. -->
131
+
132
+ [More Information Needed]
133
+
134
+ ### Results
135
+
136
+ [More Information Needed]
137
+
138
+ #### Summary
139
+
140
+
141
+
142
+ ## Model Examination [optional]
143
+
144
+ <!-- Relevant interpretability work for the model goes here -->
145
+
146
+ [More Information Needed]
147
+
148
+ ## Environmental Impact
149
+
150
+ <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->
151
+
152
+ Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
153
+
154
+ - **Hardware Type:** [More Information Needed]
155
+ - **Hours used:** [More Information Needed]
156
+ - **Cloud Provider:** [More Information Needed]
157
+ - **Compute Region:** [More Information Needed]
158
+ - **Carbon Emitted:** [More Information Needed]
159
+
160
+ ## Technical Specifications [optional]
161
+
162
+ ### Model Architecture and Objective
163
+
164
+ [More Information Needed]
165
+
166
+ ### Compute Infrastructure
167
+
168
+ [More Information Needed]
169
+
170
+ #### Hardware
171
+
172
+ [More Information Needed]
173
+
174
+ #### Software
175
+
176
+ [More Information Needed]
177
+
178
+ ## Citation [optional]
179
+
180
+ <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->
181
+
182
+ **BibTeX:**
183
+
184
+ [More Information Needed]
185
+
186
+ **APA:**
187
+
188
+ [More Information Needed]
189
+
190
+ ## Glossary [optional]
191
+
192
+ <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->
193
+
194
+ [More Information Needed]
195
+
196
+ ## More Information [optional]
197
+
198
+ [More Information Needed]
199
+
200
+ ## Model Card Authors [optional]
201
+
202
+ [More Information Needed]
203
+
204
+ ## Model Card Contact
205
+
206
+ [More Information Needed]
207
+ ### Framework versions
208
+
209
+ - PEFT 0.17.1
checkpoint-1026/adapter_config.json ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "alpha_pattern": {},
3
+ "auto_mapping": null,
4
+ "base_model_name_or_path": "Salesforce/Llama-xLAM-2-8b-fc-r",
5
+ "bias": "none",
6
+ "corda_config": null,
7
+ "eva_config": null,
8
+ "exclude_modules": null,
9
+ "fan_in_fan_out": false,
10
+ "inference_mode": true,
11
+ "init_lora_weights": true,
12
+ "layer_replication": null,
13
+ "layers_pattern": null,
14
+ "layers_to_transform": null,
15
+ "loftq_config": {},
16
+ "lora_alpha": 32,
17
+ "lora_bias": false,
18
+ "lora_dropout": 0.05,
19
+ "megatron_config": null,
20
+ "megatron_core": "megatron.core",
21
+ "modules_to_save": null,
22
+ "peft_type": "LORA",
23
+ "qalora_group_size": 16,
24
+ "r": 16,
25
+ "rank_pattern": {},
26
+ "revision": null,
27
+ "target_modules": [
28
+ "o_proj",
29
+ "k_proj",
30
+ "q_proj",
31
+ "v_proj"
32
+ ],
33
+ "target_parameters": null,
34
+ "task_type": "CAUSAL_LM",
35
+ "trainable_token_indices": null,
36
+ "use_dora": false,
37
+ "use_qalora": false,
38
+ "use_rslora": false
39
+ }
checkpoint-1026/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b1ad489d923b7844231f22b8ab5509eaf9d915c344d5276adcfe6c559be60234
3
+ size 54560368
checkpoint-1026/optimizer.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c6004a9387151e27cdcaf47b7652286824d74fd15218205de54d0ca200904bcd
3
+ size 109267450
checkpoint-1026/rng_state.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6f2e75d51727968d6d444015b953c77c079e2e3dcd94cd880fb4e18b29468768
3
+ size 14244
checkpoint-1026/scheduler.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e8fc2f667ceb7f1761af746f0ff12015b6353f2ba4b1c1f1175f8fadf4a5d348
3
+ size 1064
checkpoint-1026/special_tokens_map.json ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "additional_special_tokens": [
3
+ "<|eot_id|>",
4
+ "<|eom_id|>"
5
+ ],
6
+ "bos_token": {
7
+ "content": "<|begin_of_text|>",
8
+ "lstrip": false,
9
+ "normalized": false,
10
+ "rstrip": false,
11
+ "single_word": false
12
+ },
13
+ "eos_token": {
14
+ "content": "<|eot_id|>",
15
+ "lstrip": false,
16
+ "normalized": false,
17
+ "rstrip": false,
18
+ "single_word": false
19
+ },
20
+ "pad_token": {
21
+ "content": "<|eot_id|>",
22
+ "lstrip": false,
23
+ "normalized": false,
24
+ "rstrip": false,
25
+ "single_word": false
26
+ }
27
+ }
checkpoint-1026/tokenizer.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
3
+ size 17209920
checkpoint-1026/tokenizer_config.json ADDED
@@ -0,0 +1,2070 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "added_tokens_decoder": {
3
+ "128000": {
4
+ "content": "<|begin_of_text|>",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "128001": {
12
+ "content": "<|end_of_text|>",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "128002": {
20
+ "content": "<|reserved_special_token_0|>",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "128003": {
28
+ "content": "<|reserved_special_token_1|>",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ },
35
+ "128004": {
36
+ "content": "<|finetune_right_pad_id|>",
37
+ "lstrip": false,
38
+ "normalized": false,
39
+ "rstrip": false,
40
+ "single_word": false,
41
+ "special": true
42
+ },
43
+ "128005": {
44
+ "content": "<|reserved_special_token_2|>",
45
+ "lstrip": false,
46
+ "normalized": false,
47
+ "rstrip": false,
48
+ "single_word": false,
49
+ "special": true
50
+ },
51
+ "128006": {
52
+ "content": "<|start_header_id|>",
53
+ "lstrip": false,
54
+ "normalized": false,
55
+ "rstrip": false,
56
+ "single_word": false,
57
+ "special": true
58
+ },
59
+ "128007": {
60
+ "content": "<|end_header_id|>",
61
+ "lstrip": false,
62
+ "normalized": false,
63
+ "rstrip": false,
64
+ "single_word": false,
65
+ "special": true
66
+ },
67
+ "128008": {
68
+ "content": "<|eom_id|>",
69
+ "lstrip": false,
70
+ "normalized": false,
71
+ "rstrip": false,
72
+ "single_word": false,
73
+ "special": true
74
+ },
75
+ "128009": {
76
+ "content": "<|eot_id|>",
77
+ "lstrip": false,
78
+ "normalized": false,
79
+ "rstrip": false,
80
+ "single_word": false,
81
+ "special": true
82
+ },
83
+ "128010": {
84
+ "content": "<|python_tag|>",
85
+ "lstrip": false,
86
+ "normalized": false,
87
+ "rstrip": false,
88
+ "single_word": false,
89
+ "special": true
90
+ },
91
+ "128011": {
92
+ "content": "<|reserved_special_token_3|>",
93
+ "lstrip": false,
94
+ "normalized": false,
95
+ "rstrip": false,
96
+ "single_word": false,
97
+ "special": true
98
+ },
99
+ "128012": {
100
+ "content": "<|reserved_special_token_4|>",
101
+ "lstrip": false,
102
+ "normalized": false,
103
+ "rstrip": false,
104
+ "single_word": false,
105
+ "special": true
106
+ },
107
+ "128013": {
108
+ "content": "<|reserved_special_token_5|>",
109
+ "lstrip": false,
110
+ "normalized": false,
111
+ "rstrip": false,
112
+ "single_word": false,
113
+ "special": true
114
+ },
115
+ "128014": {
116
+ "content": "<|reserved_special_token_6|>",
117
+ "lstrip": false,
118
+ "normalized": false,
119
+ "rstrip": false,
120
+ "single_word": false,
121
+ "special": true
122
+ },
123
+ "128015": {
124
+ "content": "<|reserved_special_token_7|>",
125
+ "lstrip": false,
126
+ "normalized": false,
127
+ "rstrip": false,
128
+ "single_word": false,
129
+ "special": true
130
+ },
131
+ "128016": {
132
+ "content": "<|reserved_special_token_8|>",
133
+ "lstrip": false,
134
+ "normalized": false,
135
+ "rstrip": false,
136
+ "single_word": false,
137
+ "special": true
138
+ },
139
+ "128017": {
140
+ "content": "<|reserved_special_token_9|>",
141
+ "lstrip": false,
142
+ "normalized": false,
143
+ "rstrip": false,
144
+ "single_word": false,
145
+ "special": true
146
+ },
147
+ "128018": {
148
+ "content": "<|reserved_special_token_10|>",
149
+ "lstrip": false,
150
+ "normalized": false,
151
+ "rstrip": false,
152
+ "single_word": false,
153
+ "special": true
154
+ },
155
+ "128019": {
156
+ "content": "<|reserved_special_token_11|>",
157
+ "lstrip": false,
158
+ "normalized": false,
159
+ "rstrip": false,
160
+ "single_word": false,
161
+ "special": true
162
+ },
163
+ "128020": {
164
+ "content": "<|reserved_special_token_12|>",
165
+ "lstrip": false,
166
+ "normalized": false,
167
+ "rstrip": false,
168
+ "single_word": false,
169
+ "special": true
170
+ },
171
+ "128021": {
172
+ "content": "<|reserved_special_token_13|>",
173
+ "lstrip": false,
174
+ "normalized": false,
175
+ "rstrip": false,
176
+ "single_word": false,
177
+ "special": true
178
+ },
179
+ "128022": {
180
+ "content": "<|reserved_special_token_14|>",
181
+ "lstrip": false,
182
+ "normalized": false,
183
+ "rstrip": false,
184
+ "single_word": false,
185
+ "special": true
186
+ },
187
+ "128023": {
188
+ "content": "<|reserved_special_token_15|>",
189
+ "lstrip": false,
190
+ "normalized": false,
191
+ "rstrip": false,
192
+ "single_word": false,
193
+ "special": true
194
+ },
195
+ "128024": {
196
+ "content": "<|reserved_special_token_16|>",
197
+ "lstrip": false,
198
+ "normalized": false,
199
+ "rstrip": false,
200
+ "single_word": false,
201
+ "special": true
202
+ },
203
+ "128025": {
204
+ "content": "<|reserved_special_token_17|>",
205
+ "lstrip": false,
206
+ "normalized": false,
207
+ "rstrip": false,
208
+ "single_word": false,
209
+ "special": true
210
+ },
211
+ "128026": {
212
+ "content": "<|reserved_special_token_18|>",
213
+ "lstrip": false,
214
+ "normalized": false,
215
+ "rstrip": false,
216
+ "single_word": false,
217
+ "special": true
218
+ },
219
+ "128027": {
220
+ "content": "<|reserved_special_token_19|>",
221
+ "lstrip": false,
222
+ "normalized": false,
223
+ "rstrip": false,
224
+ "single_word": false,
225
+ "special": true
226
+ },
227
+ "128028": {
228
+ "content": "<|reserved_special_token_20|>",
229
+ "lstrip": false,
230
+ "normalized": false,
231
+ "rstrip": false,
232
+ "single_word": false,
233
+ "special": true
234
+ },
235
+ "128029": {
236
+ "content": "<|reserved_special_token_21|>",
237
+ "lstrip": false,
238
+ "normalized": false,
239
+ "rstrip": false,
240
+ "single_word": false,
241
+ "special": true
242
+ },
243
+ "128030": {
244
+ "content": "<|reserved_special_token_22|>",
245
+ "lstrip": false,
246
+ "normalized": false,
247
+ "rstrip": false,
248
+ "single_word": false,
249
+ "special": true
250
+ },
251
+ "128031": {
252
+ "content": "<|reserved_special_token_23|>",
253
+ "lstrip": false,
254
+ "normalized": false,
255
+ "rstrip": false,
256
+ "single_word": false,
257
+ "special": true
258
+ },
259
+ "128032": {
260
+ "content": "<|reserved_special_token_24|>",
261
+ "lstrip": false,
262
+ "normalized": false,
263
+ "rstrip": false,
264
+ "single_word": false,
265
+ "special": true
266
+ },
267
+ "128033": {
268
+ "content": "<|reserved_special_token_25|>",
269
+ "lstrip": false,
270
+ "normalized": false,
271
+ "rstrip": false,
272
+ "single_word": false,
273
+ "special": true
274
+ },
275
+ "128034": {
276
+ "content": "<|reserved_special_token_26|>",
277
+ "lstrip": false,
278
+ "normalized": false,
279
+ "rstrip": false,
280
+ "single_word": false,
281
+ "special": true
282
+ },
283
+ "128035": {
284
+ "content": "<|reserved_special_token_27|>",
285
+ "lstrip": false,
286
+ "normalized": false,
287
+ "rstrip": false,
288
+ "single_word": false,
289
+ "special": true
290
+ },
291
+ "128036": {
292
+ "content": "<|reserved_special_token_28|>",
293
+ "lstrip": false,
294
+ "normalized": false,
295
+ "rstrip": false,
296
+ "single_word": false,
297
+ "special": true
298
+ },
299
+ "128037": {
300
+ "content": "<|reserved_special_token_29|>",
301
+ "lstrip": false,
302
+ "normalized": false,
303
+ "rstrip": false,
304
+ "single_word": false,
305
+ "special": true
306
+ },
307
+ "128038": {
308
+ "content": "<|reserved_special_token_30|>",
309
+ "lstrip": false,
310
+ "normalized": false,
311
+ "rstrip": false,
312
+ "single_word": false,
313
+ "special": true
314
+ },
315
+ "128039": {
316
+ "content": "<|reserved_special_token_31|>",
317
+ "lstrip": false,
318
+ "normalized": false,
319
+ "rstrip": false,
320
+ "single_word": false,
321
+ "special": true
322
+ },
323
+ "128040": {
324
+ "content": "<|reserved_special_token_32|>",
325
+ "lstrip": false,
326
+ "normalized": false,
327
+ "rstrip": false,
328
+ "single_word": false,
329
+ "special": true
330
+ },
331
+ "128041": {
332
+ "content": "<|reserved_special_token_33|>",
333
+ "lstrip": false,
334
+ "normalized": false,
335
+ "rstrip": false,
336
+ "single_word": false,
337
+ "special": true
338
+ },
339
+ "128042": {
340
+ "content": "<|reserved_special_token_34|>",
341
+ "lstrip": false,
342
+ "normalized": false,
343
+ "rstrip": false,
344
+ "single_word": false,
345
+ "special": true
346
+ },
347
+ "128043": {
348
+ "content": "<|reserved_special_token_35|>",
349
+ "lstrip": false,
350
+ "normalized": false,
351
+ "rstrip": false,
352
+ "single_word": false,
353
+ "special": true
354
+ },
355
+ "128044": {
356
+ "content": "<|reserved_special_token_36|>",
357
+ "lstrip": false,
358
+ "normalized": false,
359
+ "rstrip": false,
360
+ "single_word": false,
361
+ "special": true
362
+ },
363
+ "128045": {
364
+ "content": "<|reserved_special_token_37|>",
365
+ "lstrip": false,
366
+ "normalized": false,
367
+ "rstrip": false,
368
+ "single_word": false,
369
+ "special": true
370
+ },
371
+ "128046": {
372
+ "content": "<|reserved_special_token_38|>",
373
+ "lstrip": false,
374
+ "normalized": false,
375
+ "rstrip": false,
376
+ "single_word": false,
377
+ "special": true
378
+ },
379
+ "128047": {
380
+ "content": "<|reserved_special_token_39|>",
381
+ "lstrip": false,
382
+ "normalized": false,
383
+ "rstrip": false,
384
+ "single_word": false,
385
+ "special": true
386
+ },
387
+ "128048": {
388
+ "content": "<|reserved_special_token_40|>",
389
+ "lstrip": false,
390
+ "normalized": false,
391
+ "rstrip": false,
392
+ "single_word": false,
393
+ "special": true
394
+ },
395
+ "128049": {
396
+ "content": "<|reserved_special_token_41|>",
397
+ "lstrip": false,
398
+ "normalized": false,
399
+ "rstrip": false,
400
+ "single_word": false,
401
+ "special": true
402
+ },
403
+ "128050": {
404
+ "content": "<|reserved_special_token_42|>",
405
+ "lstrip": false,
406
+ "normalized": false,
407
+ "rstrip": false,
408
+ "single_word": false,
409
+ "special": true
410
+ },
411
+ "128051": {
412
+ "content": "<|reserved_special_token_43|>",
413
+ "lstrip": false,
414
+ "normalized": false,
415
+ "rstrip": false,
416
+ "single_word": false,
417
+ "special": true
418
+ },
419
+ "128052": {
420
+ "content": "<|reserved_special_token_44|>",
421
+ "lstrip": false,
422
+ "normalized": false,
423
+ "rstrip": false,
424
+ "single_word": false,
425
+ "special": true
426
+ },
427
+ "128053": {
428
+ "content": "<|reserved_special_token_45|>",
429
+ "lstrip": false,
430
+ "normalized": false,
431
+ "rstrip": false,
432
+ "single_word": false,
433
+ "special": true
434
+ },
435
+ "128054": {
436
+ "content": "<|reserved_special_token_46|>",
437
+ "lstrip": false,
438
+ "normalized": false,
439
+ "rstrip": false,
440
+ "single_word": false,
441
+ "special": true
442
+ },
443
+ "128055": {
444
+ "content": "<|reserved_special_token_47|>",
445
+ "lstrip": false,
446
+ "normalized": false,
447
+ "rstrip": false,
448
+ "single_word": false,
449
+ "special": true
450
+ },
451
+ "128056": {
452
+ "content": "<|reserved_special_token_48|>",
453
+ "lstrip": false,
454
+ "normalized": false,
455
+ "rstrip": false,
456
+ "single_word": false,
457
+ "special": true
458
+ },
459
+ "128057": {
460
+ "content": "<|reserved_special_token_49|>",
461
+ "lstrip": false,
462
+ "normalized": false,
463
+ "rstrip": false,
464
+ "single_word": false,
465
+ "special": true
466
+ },
467
+ "128058": {
468
+ "content": "<|reserved_special_token_50|>",
469
+ "lstrip": false,
470
+ "normalized": false,
471
+ "rstrip": false,
472
+ "single_word": false,
473
+ "special": true
474
+ },
475
+ "128059": {
476
+ "content": "<|reserved_special_token_51|>",
477
+ "lstrip": false,
478
+ "normalized": false,
479
+ "rstrip": false,
480
+ "single_word": false,
481
+ "special": true
482
+ },
483
+ "128060": {
484
+ "content": "<|reserved_special_token_52|>",
485
+ "lstrip": false,
486
+ "normalized": false,
487
+ "rstrip": false,
488
+ "single_word": false,
489
+ "special": true
490
+ },
491
+ "128061": {
492
+ "content": "<|reserved_special_token_53|>",
493
+ "lstrip": false,
494
+ "normalized": false,
495
+ "rstrip": false,
496
+ "single_word": false,
497
+ "special": true
498
+ },
499
+ "128062": {
500
+ "content": "<|reserved_special_token_54|>",
501
+ "lstrip": false,
502
+ "normalized": false,
503
+ "rstrip": false,
504
+ "single_word": false,
505
+ "special": true
506
+ },
507
+ "128063": {
508
+ "content": "<|reserved_special_token_55|>",
509
+ "lstrip": false,
510
+ "normalized": false,
511
+ "rstrip": false,
512
+ "single_word": false,
513
+ "special": true
514
+ },
515
+ "128064": {
516
+ "content": "<|reserved_special_token_56|>",
517
+ "lstrip": false,
518
+ "normalized": false,
519
+ "rstrip": false,
520
+ "single_word": false,
521
+ "special": true
522
+ },
523
+ "128065": {
524
+ "content": "<|reserved_special_token_57|>",
525
+ "lstrip": false,
526
+ "normalized": false,
527
+ "rstrip": false,
528
+ "single_word": false,
529
+ "special": true
530
+ },
531
+ "128066": {
532
+ "content": "<|reserved_special_token_58|>",
533
+ "lstrip": false,
534
+ "normalized": false,
535
+ "rstrip": false,
536
+ "single_word": false,
537
+ "special": true
538
+ },
539
+ "128067": {
540
+ "content": "<|reserved_special_token_59|>",
541
+ "lstrip": false,
542
+ "normalized": false,
543
+ "rstrip": false,
544
+ "single_word": false,
545
+ "special": true
546
+ },
547
+ "128068": {
548
+ "content": "<|reserved_special_token_60|>",
549
+ "lstrip": false,
550
+ "normalized": false,
551
+ "rstrip": false,
552
+ "single_word": false,
553
+ "special": true
554
+ },
555
+ "128069": {
556
+ "content": "<|reserved_special_token_61|>",
557
+ "lstrip": false,
558
+ "normalized": false,
559
+ "rstrip": false,
560
+ "single_word": false,
561
+ "special": true
562
+ },
563
+ "128070": {
564
+ "content": "<|reserved_special_token_62|>",
565
+ "lstrip": false,
566
+ "normalized": false,
567
+ "rstrip": false,
568
+ "single_word": false,
569
+ "special": true
570
+ },
571
+ "128071": {
572
+ "content": "<|reserved_special_token_63|>",
573
+ "lstrip": false,
574
+ "normalized": false,
575
+ "rstrip": false,
576
+ "single_word": false,
577
+ "special": true
578
+ },
579
+ "128072": {
580
+ "content": "<|reserved_special_token_64|>",
581
+ "lstrip": false,
582
+ "normalized": false,
583
+ "rstrip": false,
584
+ "single_word": false,
585
+ "special": true
586
+ },
587
+ "128073": {
588
+ "content": "<|reserved_special_token_65|>",
589
+ "lstrip": false,
590
+ "normalized": false,
591
+ "rstrip": false,
592
+ "single_word": false,
593
+ "special": true
594
+ },
595
+ "128074": {
596
+ "content": "<|reserved_special_token_66|>",
597
+ "lstrip": false,
598
+ "normalized": false,
599
+ "rstrip": false,
600
+ "single_word": false,
601
+ "special": true
602
+ },
603
+ "128075": {
604
+ "content": "<|reserved_special_token_67|>",
605
+ "lstrip": false,
606
+ "normalized": false,
607
+ "rstrip": false,
608
+ "single_word": false,
609
+ "special": true
610
+ },
611
+ "128076": {
612
+ "content": "<|reserved_special_token_68|>",
613
+ "lstrip": false,
614
+ "normalized": false,
615
+ "rstrip": false,
616
+ "single_word": false,
617
+ "special": true
618
+ },
619
+ "128077": {
620
+ "content": "<|reserved_special_token_69|>",
621
+ "lstrip": false,
622
+ "normalized": false,
623
+ "rstrip": false,
624
+ "single_word": false,
625
+ "special": true
626
+ },
627
+ "128078": {
628
+ "content": "<|reserved_special_token_70|>",
629
+ "lstrip": false,
630
+ "normalized": false,
631
+ "rstrip": false,
632
+ "single_word": false,
633
+ "special": true
634
+ },
635
+ "128079": {
636
+ "content": "<|reserved_special_token_71|>",
637
+ "lstrip": false,
638
+ "normalized": false,
639
+ "rstrip": false,
640
+ "single_word": false,
641
+ "special": true
642
+ },
643
+ "128080": {
644
+ "content": "<|reserved_special_token_72|>",
645
+ "lstrip": false,
646
+ "normalized": false,
647
+ "rstrip": false,
648
+ "single_word": false,
649
+ "special": true
650
+ },
651
+ "128081": {
652
+ "content": "<|reserved_special_token_73|>",
653
+ "lstrip": false,
654
+ "normalized": false,
655
+ "rstrip": false,
656
+ "single_word": false,
657
+ "special": true
658
+ },
659
+ "128082": {
660
+ "content": "<|reserved_special_token_74|>",
661
+ "lstrip": false,
662
+ "normalized": false,
663
+ "rstrip": false,
664
+ "single_word": false,
665
+ "special": true
666
+ },
667
+ "128083": {
668
+ "content": "<|reserved_special_token_75|>",
669
+ "lstrip": false,
670
+ "normalized": false,
671
+ "rstrip": false,
672
+ "single_word": false,
673
+ "special": true
674
+ },
675
+ "128084": {
676
+ "content": "<|reserved_special_token_76|>",
677
+ "lstrip": false,
678
+ "normalized": false,
679
+ "rstrip": false,
680
+ "single_word": false,
681
+ "special": true
682
+ },
683
+ "128085": {
684
+ "content": "<|reserved_special_token_77|>",
685
+ "lstrip": false,
686
+ "normalized": false,
687
+ "rstrip": false,
688
+ "single_word": false,
689
+ "special": true
690
+ },
691
+ "128086": {
692
+ "content": "<|reserved_special_token_78|>",
693
+ "lstrip": false,
694
+ "normalized": false,
695
+ "rstrip": false,
696
+ "single_word": false,
697
+ "special": true
698
+ },
699
+ "128087": {
700
+ "content": "<|reserved_special_token_79|>",
701
+ "lstrip": false,
702
+ "normalized": false,
703
+ "rstrip": false,
704
+ "single_word": false,
705
+ "special": true
706
+ },
707
+ "128088": {
708
+ "content": "<|reserved_special_token_80|>",
709
+ "lstrip": false,
710
+ "normalized": false,
711
+ "rstrip": false,
712
+ "single_word": false,
713
+ "special": true
714
+ },
715
+ "128089": {
716
+ "content": "<|reserved_special_token_81|>",
717
+ "lstrip": false,
718
+ "normalized": false,
719
+ "rstrip": false,
720
+ "single_word": false,
721
+ "special": true
722
+ },
723
+ "128090": {
724
+ "content": "<|reserved_special_token_82|>",
725
+ "lstrip": false,
726
+ "normalized": false,
727
+ "rstrip": false,
728
+ "single_word": false,
729
+ "special": true
730
+ },
731
+ "128091": {
732
+ "content": "<|reserved_special_token_83|>",
733
+ "lstrip": false,
734
+ "normalized": false,
735
+ "rstrip": false,
736
+ "single_word": false,
737
+ "special": true
738
+ },
739
+ "128092": {
740
+ "content": "<|reserved_special_token_84|>",
741
+ "lstrip": false,
742
+ "normalized": false,
743
+ "rstrip": false,
744
+ "single_word": false,
745
+ "special": true
746
+ },
747
+ "128093": {
748
+ "content": "<|reserved_special_token_85|>",
749
+ "lstrip": false,
750
+ "normalized": false,
751
+ "rstrip": false,
752
+ "single_word": false,
753
+ "special": true
754
+ },
755
+ "128094": {
756
+ "content": "<|reserved_special_token_86|>",
757
+ "lstrip": false,
758
+ "normalized": false,
759
+ "rstrip": false,
760
+ "single_word": false,
761
+ "special": true
762
+ },
763
+ "128095": {
764
+ "content": "<|reserved_special_token_87|>",
765
+ "lstrip": false,
766
+ "normalized": false,
767
+ "rstrip": false,
768
+ "single_word": false,
769
+ "special": true
770
+ },
771
+ "128096": {
772
+ "content": "<|reserved_special_token_88|>",
773
+ "lstrip": false,
774
+ "normalized": false,
775
+ "rstrip": false,
776
+ "single_word": false,
777
+ "special": true
778
+ },
779
+ "128097": {
780
+ "content": "<|reserved_special_token_89|>",
781
+ "lstrip": false,
782
+ "normalized": false,
783
+ "rstrip": false,
784
+ "single_word": false,
785
+ "special": true
786
+ },
787
+ "128098": {
788
+ "content": "<|reserved_special_token_90|>",
789
+ "lstrip": false,
790
+ "normalized": false,
791
+ "rstrip": false,
792
+ "single_word": false,
793
+ "special": true
794
+ },
795
+ "128099": {
796
+ "content": "<|reserved_special_token_91|>",
797
+ "lstrip": false,
798
+ "normalized": false,
799
+ "rstrip": false,
800
+ "single_word": false,
801
+ "special": true
802
+ },
803
+ "128100": {
804
+ "content": "<|reserved_special_token_92|>",
805
+ "lstrip": false,
806
+ "normalized": false,
807
+ "rstrip": false,
808
+ "single_word": false,
809
+ "special": true
810
+ },
811
+ "128101": {
812
+ "content": "<|reserved_special_token_93|>",
813
+ "lstrip": false,
814
+ "normalized": false,
815
+ "rstrip": false,
816
+ "single_word": false,
817
+ "special": true
818
+ },
819
+ "128102": {
820
+ "content": "<|reserved_special_token_94|>",
821
+ "lstrip": false,
822
+ "normalized": false,
823
+ "rstrip": false,
824
+ "single_word": false,
825
+ "special": true
826
+ },
827
+ "128103": {
828
+ "content": "<|reserved_special_token_95|>",
829
+ "lstrip": false,
830
+ "normalized": false,
831
+ "rstrip": false,
832
+ "single_word": false,
833
+ "special": true
834
+ },
835
+ "128104": {
836
+ "content": "<|reserved_special_token_96|>",
837
+ "lstrip": false,
838
+ "normalized": false,
839
+ "rstrip": false,
840
+ "single_word": false,
841
+ "special": true
842
+ },
843
+ "128105": {
844
+ "content": "<|reserved_special_token_97|>",
845
+ "lstrip": false,
846
+ "normalized": false,
847
+ "rstrip": false,
848
+ "single_word": false,
849
+ "special": true
850
+ },
851
+ "128106": {
852
+ "content": "<|reserved_special_token_98|>",
853
+ "lstrip": false,
854
+ "normalized": false,
855
+ "rstrip": false,
856
+ "single_word": false,
857
+ "special": true
858
+ },
859
+ "128107": {
860
+ "content": "<|reserved_special_token_99|>",
861
+ "lstrip": false,
862
+ "normalized": false,
863
+ "rstrip": false,
864
+ "single_word": false,
865
+ "special": true
866
+ },
867
+ "128108": {
868
+ "content": "<|reserved_special_token_100|>",
869
+ "lstrip": false,
870
+ "normalized": false,
871
+ "rstrip": false,
872
+ "single_word": false,
873
+ "special": true
874
+ },
875
+ "128109": {
876
+ "content": "<|reserved_special_token_101|>",
877
+ "lstrip": false,
878
+ "normalized": false,
879
+ "rstrip": false,
880
+ "single_word": false,
881
+ "special": true
882
+ },
883
+ "128110": {
884
+ "content": "<|reserved_special_token_102|>",
885
+ "lstrip": false,
886
+ "normalized": false,
887
+ "rstrip": false,
888
+ "single_word": false,
889
+ "special": true
890
+ },
891
+ "128111": {
892
+ "content": "<|reserved_special_token_103|>",
893
+ "lstrip": false,
894
+ "normalized": false,
895
+ "rstrip": false,
896
+ "single_word": false,
897
+ "special": true
898
+ },
899
+ "128112": {
900
+ "content": "<|reserved_special_token_104|>",
901
+ "lstrip": false,
902
+ "normalized": false,
903
+ "rstrip": false,
904
+ "single_word": false,
905
+ "special": true
906
+ },
907
+ "128113": {
908
+ "content": "<|reserved_special_token_105|>",
909
+ "lstrip": false,
910
+ "normalized": false,
911
+ "rstrip": false,
912
+ "single_word": false,
913
+ "special": true
914
+ },
915
+ "128114": {
916
+ "content": "<|reserved_special_token_106|>",
917
+ "lstrip": false,
918
+ "normalized": false,
919
+ "rstrip": false,
920
+ "single_word": false,
921
+ "special": true
922
+ },
923
+ "128115": {
924
+ "content": "<|reserved_special_token_107|>",
925
+ "lstrip": false,
926
+ "normalized": false,
927
+ "rstrip": false,
928
+ "single_word": false,
929
+ "special": true
930
+ },
931
+ "128116": {
932
+ "content": "<|reserved_special_token_108|>",
933
+ "lstrip": false,
934
+ "normalized": false,
935
+ "rstrip": false,
936
+ "single_word": false,
937
+ "special": true
938
+ },
939
+ "128117": {
940
+ "content": "<|reserved_special_token_109|>",
941
+ "lstrip": false,
942
+ "normalized": false,
943
+ "rstrip": false,
944
+ "single_word": false,
945
+ "special": true
946
+ },
947
+ "128118": {
948
+ "content": "<|reserved_special_token_110|>",
949
+ "lstrip": false,
950
+ "normalized": false,
951
+ "rstrip": false,
952
+ "single_word": false,
953
+ "special": true
954
+ },
955
+ "128119": {
956
+ "content": "<|reserved_special_token_111|>",
957
+ "lstrip": false,
958
+ "normalized": false,
959
+ "rstrip": false,
960
+ "single_word": false,
961
+ "special": true
962
+ },
963
+ "128120": {
964
+ "content": "<|reserved_special_token_112|>",
965
+ "lstrip": false,
966
+ "normalized": false,
967
+ "rstrip": false,
968
+ "single_word": false,
969
+ "special": true
970
+ },
971
+ "128121": {
972
+ "content": "<|reserved_special_token_113|>",
973
+ "lstrip": false,
974
+ "normalized": false,
975
+ "rstrip": false,
976
+ "single_word": false,
977
+ "special": true
978
+ },
979
+ "128122": {
980
+ "content": "<|reserved_special_token_114|>",
981
+ "lstrip": false,
982
+ "normalized": false,
983
+ "rstrip": false,
984
+ "single_word": false,
985
+ "special": true
986
+ },
987
+ "128123": {
988
+ "content": "<|reserved_special_token_115|>",
989
+ "lstrip": false,
990
+ "normalized": false,
991
+ "rstrip": false,
992
+ "single_word": false,
993
+ "special": true
994
+ },
995
+ "128124": {
996
+ "content": "<|reserved_special_token_116|>",
997
+ "lstrip": false,
998
+ "normalized": false,
999
+ "rstrip": false,
1000
+ "single_word": false,
1001
+ "special": true
1002
+ },
1003
+ "128125": {
1004
+ "content": "<|reserved_special_token_117|>",
1005
+ "lstrip": false,
1006
+ "normalized": false,
1007
+ "rstrip": false,
1008
+ "single_word": false,
1009
+ "special": true
1010
+ },
1011
+ "128126": {
1012
+ "content": "<|reserved_special_token_118|>",
1013
+ "lstrip": false,
1014
+ "normalized": false,
1015
+ "rstrip": false,
1016
+ "single_word": false,
1017
+ "special": true
1018
+ },
1019
+ "128127": {
1020
+ "content": "<|reserved_special_token_119|>",
1021
+ "lstrip": false,
1022
+ "normalized": false,
1023
+ "rstrip": false,
1024
+ "single_word": false,
1025
+ "special": true
1026
+ },
1027
+ "128128": {
1028
+ "content": "<|reserved_special_token_120|>",
1029
+ "lstrip": false,
1030
+ "normalized": false,
1031
+ "rstrip": false,
1032
+ "single_word": false,
1033
+ "special": true
1034
+ },
1035
+ "128129": {
1036
+ "content": "<|reserved_special_token_121|>",
1037
+ "lstrip": false,
1038
+ "normalized": false,
1039
+ "rstrip": false,
1040
+ "single_word": false,
1041
+ "special": true
1042
+ },
1043
+ "128130": {
1044
+ "content": "<|reserved_special_token_122|>",
1045
+ "lstrip": false,
1046
+ "normalized": false,
1047
+ "rstrip": false,
1048
+ "single_word": false,
1049
+ "special": true
1050
+ },
1051
+ "128131": {
1052
+ "content": "<|reserved_special_token_123|>",
1053
+ "lstrip": false,
1054
+ "normalized": false,
1055
+ "rstrip": false,
1056
+ "single_word": false,
1057
+ "special": true
1058
+ },
1059
+ "128132": {
1060
+ "content": "<|reserved_special_token_124|>",
1061
+ "lstrip": false,
1062
+ "normalized": false,
1063
+ "rstrip": false,
1064
+ "single_word": false,
1065
+ "special": true
1066
+ },
1067
+ "128133": {
1068
+ "content": "<|reserved_special_token_125|>",
1069
+ "lstrip": false,
1070
+ "normalized": false,
1071
+ "rstrip": false,
1072
+ "single_word": false,
1073
+ "special": true
1074
+ },
1075
+ "128134": {
1076
+ "content": "<|reserved_special_token_126|>",
1077
+ "lstrip": false,
1078
+ "normalized": false,
1079
+ "rstrip": false,
1080
+ "single_word": false,
1081
+ "special": true
1082
+ },
1083
+ "128135": {
1084
+ "content": "<|reserved_special_token_127|>",
1085
+ "lstrip": false,
1086
+ "normalized": false,
1087
+ "rstrip": false,
1088
+ "single_word": false,
1089
+ "special": true
1090
+ },
1091
+ "128136": {
1092
+ "content": "<|reserved_special_token_128|>",
1093
+ "lstrip": false,
1094
+ "normalized": false,
1095
+ "rstrip": false,
1096
+ "single_word": false,
1097
+ "special": true
1098
+ },
1099
+ "128137": {
1100
+ "content": "<|reserved_special_token_129|>",
1101
+ "lstrip": false,
1102
+ "normalized": false,
1103
+ "rstrip": false,
1104
+ "single_word": false,
1105
+ "special": true
1106
+ },
1107
+ "128138": {
1108
+ "content": "<|reserved_special_token_130|>",
1109
+ "lstrip": false,
1110
+ "normalized": false,
1111
+ "rstrip": false,
1112
+ "single_word": false,
1113
+ "special": true
1114
+ },
1115
+ "128139": {
1116
+ "content": "<|reserved_special_token_131|>",
1117
+ "lstrip": false,
1118
+ "normalized": false,
1119
+ "rstrip": false,
1120
+ "single_word": false,
1121
+ "special": true
1122
+ },
1123
+ "128140": {
1124
+ "content": "<|reserved_special_token_132|>",
1125
+ "lstrip": false,
1126
+ "normalized": false,
1127
+ "rstrip": false,
1128
+ "single_word": false,
1129
+ "special": true
1130
+ },
1131
+ "128141": {
1132
+ "content": "<|reserved_special_token_133|>",
1133
+ "lstrip": false,
1134
+ "normalized": false,
1135
+ "rstrip": false,
1136
+ "single_word": false,
1137
+ "special": true
1138
+ },
1139
+ "128142": {
1140
+ "content": "<|reserved_special_token_134|>",
1141
+ "lstrip": false,
1142
+ "normalized": false,
1143
+ "rstrip": false,
1144
+ "single_word": false,
1145
+ "special": true
1146
+ },
1147
+ "128143": {
1148
+ "content": "<|reserved_special_token_135|>",
1149
+ "lstrip": false,
1150
+ "normalized": false,
1151
+ "rstrip": false,
1152
+ "single_word": false,
1153
+ "special": true
1154
+ },
1155
+ "128144": {
1156
+ "content": "<|reserved_special_token_136|>",
1157
+ "lstrip": false,
1158
+ "normalized": false,
1159
+ "rstrip": false,
1160
+ "single_word": false,
1161
+ "special": true
1162
+ },
1163
+ "128145": {
1164
+ "content": "<|reserved_special_token_137|>",
1165
+ "lstrip": false,
1166
+ "normalized": false,
1167
+ "rstrip": false,
1168
+ "single_word": false,
1169
+ "special": true
1170
+ },
1171
+ "128146": {
1172
+ "content": "<|reserved_special_token_138|>",
1173
+ "lstrip": false,
1174
+ "normalized": false,
1175
+ "rstrip": false,
1176
+ "single_word": false,
1177
+ "special": true
1178
+ },
1179
+ "128147": {
1180
+ "content": "<|reserved_special_token_139|>",
1181
+ "lstrip": false,
1182
+ "normalized": false,
1183
+ "rstrip": false,
1184
+ "single_word": false,
1185
+ "special": true
1186
+ },
1187
+ "128148": {
1188
+ "content": "<|reserved_special_token_140|>",
1189
+ "lstrip": false,
1190
+ "normalized": false,
1191
+ "rstrip": false,
1192
+ "single_word": false,
1193
+ "special": true
1194
+ },
1195
+ "128149": {
1196
+ "content": "<|reserved_special_token_141|>",
1197
+ "lstrip": false,
1198
+ "normalized": false,
1199
+ "rstrip": false,
1200
+ "single_word": false,
1201
+ "special": true
1202
+ },
1203
+ "128150": {
1204
+ "content": "<|reserved_special_token_142|>",
1205
+ "lstrip": false,
1206
+ "normalized": false,
1207
+ "rstrip": false,
1208
+ "single_word": false,
1209
+ "special": true
1210
+ },
1211
+ "128151": {
1212
+ "content": "<|reserved_special_token_143|>",
1213
+ "lstrip": false,
1214
+ "normalized": false,
1215
+ "rstrip": false,
1216
+ "single_word": false,
1217
+ "special": true
1218
+ },
1219
+ "128152": {
1220
+ "content": "<|reserved_special_token_144|>",
1221
+ "lstrip": false,
1222
+ "normalized": false,
1223
+ "rstrip": false,
1224
+ "single_word": false,
1225
+ "special": true
1226
+ },
1227
+ "128153": {
1228
+ "content": "<|reserved_special_token_145|>",
1229
+ "lstrip": false,
1230
+ "normalized": false,
1231
+ "rstrip": false,
1232
+ "single_word": false,
1233
+ "special": true
1234
+ },
1235
+ "128154": {
1236
+ "content": "<|reserved_special_token_146|>",
1237
+ "lstrip": false,
1238
+ "normalized": false,
1239
+ "rstrip": false,
1240
+ "single_word": false,
1241
+ "special": true
1242
+ },
1243
+ "128155": {
1244
+ "content": "<|reserved_special_token_147|>",
1245
+ "lstrip": false,
1246
+ "normalized": false,
1247
+ "rstrip": false,
1248
+ "single_word": false,
1249
+ "special": true
1250
+ },
1251
+ "128156": {
1252
+ "content": "<|reserved_special_token_148|>",
1253
+ "lstrip": false,
1254
+ "normalized": false,
1255
+ "rstrip": false,
1256
+ "single_word": false,
1257
+ "special": true
1258
+ },
1259
+ "128157": {
1260
+ "content": "<|reserved_special_token_149|>",
1261
+ "lstrip": false,
1262
+ "normalized": false,
1263
+ "rstrip": false,
1264
+ "single_word": false,
1265
+ "special": true
1266
+ },
1267
+ "128158": {
1268
+ "content": "<|reserved_special_token_150|>",
1269
+ "lstrip": false,
1270
+ "normalized": false,
1271
+ "rstrip": false,
1272
+ "single_word": false,
1273
+ "special": true
1274
+ },
1275
+ "128159": {
1276
+ "content": "<|reserved_special_token_151|>",
1277
+ "lstrip": false,
1278
+ "normalized": false,
1279
+ "rstrip": false,
1280
+ "single_word": false,
1281
+ "special": true
1282
+ },
1283
+ "128160": {
1284
+ "content": "<|reserved_special_token_152|>",
1285
+ "lstrip": false,
1286
+ "normalized": false,
1287
+ "rstrip": false,
1288
+ "single_word": false,
1289
+ "special": true
1290
+ },
1291
+ "128161": {
1292
+ "content": "<|reserved_special_token_153|>",
1293
+ "lstrip": false,
1294
+ "normalized": false,
1295
+ "rstrip": false,
1296
+ "single_word": false,
1297
+ "special": true
1298
+ },
1299
+ "128162": {
1300
+ "content": "<|reserved_special_token_154|>",
1301
+ "lstrip": false,
1302
+ "normalized": false,
1303
+ "rstrip": false,
1304
+ "single_word": false,
1305
+ "special": true
1306
+ },
1307
+ "128163": {
1308
+ "content": "<|reserved_special_token_155|>",
1309
+ "lstrip": false,
1310
+ "normalized": false,
1311
+ "rstrip": false,
1312
+ "single_word": false,
1313
+ "special": true
1314
+ },
1315
+ "128164": {
1316
+ "content": "<|reserved_special_token_156|>",
1317
+ "lstrip": false,
1318
+ "normalized": false,
1319
+ "rstrip": false,
1320
+ "single_word": false,
1321
+ "special": true
1322
+ },
1323
+ "128165": {
1324
+ "content": "<|reserved_special_token_157|>",
1325
+ "lstrip": false,
1326
+ "normalized": false,
1327
+ "rstrip": false,
1328
+ "single_word": false,
1329
+ "special": true
1330
+ },
1331
+ "128166": {
1332
+ "content": "<|reserved_special_token_158|>",
1333
+ "lstrip": false,
1334
+ "normalized": false,
1335
+ "rstrip": false,
1336
+ "single_word": false,
1337
+ "special": true
1338
+ },
1339
+ "128167": {
1340
+ "content": "<|reserved_special_token_159|>",
1341
+ "lstrip": false,
1342
+ "normalized": false,
1343
+ "rstrip": false,
1344
+ "single_word": false,
1345
+ "special": true
1346
+ },
1347
+ "128168": {
1348
+ "content": "<|reserved_special_token_160|>",
1349
+ "lstrip": false,
1350
+ "normalized": false,
1351
+ "rstrip": false,
1352
+ "single_word": false,
1353
+ "special": true
1354
+ },
1355
+ "128169": {
1356
+ "content": "<|reserved_special_token_161|>",
1357
+ "lstrip": false,
1358
+ "normalized": false,
1359
+ "rstrip": false,
1360
+ "single_word": false,
1361
+ "special": true
1362
+ },
1363
+ "128170": {
1364
+ "content": "<|reserved_special_token_162|>",
1365
+ "lstrip": false,
1366
+ "normalized": false,
1367
+ "rstrip": false,
1368
+ "single_word": false,
1369
+ "special": true
1370
+ },
1371
+ "128171": {
1372
+ "content": "<|reserved_special_token_163|>",
1373
+ "lstrip": false,
1374
+ "normalized": false,
1375
+ "rstrip": false,
1376
+ "single_word": false,
1377
+ "special": true
1378
+ },
1379
+ "128172": {
1380
+ "content": "<|reserved_special_token_164|>",
1381
+ "lstrip": false,
1382
+ "normalized": false,
1383
+ "rstrip": false,
1384
+ "single_word": false,
1385
+ "special": true
1386
+ },
1387
+ "128173": {
1388
+ "content": "<|reserved_special_token_165|>",
1389
+ "lstrip": false,
1390
+ "normalized": false,
1391
+ "rstrip": false,
1392
+ "single_word": false,
1393
+ "special": true
1394
+ },
1395
+ "128174": {
1396
+ "content": "<|reserved_special_token_166|>",
1397
+ "lstrip": false,
1398
+ "normalized": false,
1399
+ "rstrip": false,
1400
+ "single_word": false,
1401
+ "special": true
1402
+ },
1403
+ "128175": {
1404
+ "content": "<|reserved_special_token_167|>",
1405
+ "lstrip": false,
1406
+ "normalized": false,
1407
+ "rstrip": false,
1408
+ "single_word": false,
1409
+ "special": true
1410
+ },
1411
+ "128176": {
1412
+ "content": "<|reserved_special_token_168|>",
1413
+ "lstrip": false,
1414
+ "normalized": false,
1415
+ "rstrip": false,
1416
+ "single_word": false,
1417
+ "special": true
1418
+ },
1419
+ "128177": {
1420
+ "content": "<|reserved_special_token_169|>",
1421
+ "lstrip": false,
1422
+ "normalized": false,
1423
+ "rstrip": false,
1424
+ "single_word": false,
1425
+ "special": true
1426
+ },
1427
+ "128178": {
1428
+ "content": "<|reserved_special_token_170|>",
1429
+ "lstrip": false,
1430
+ "normalized": false,
1431
+ "rstrip": false,
1432
+ "single_word": false,
1433
+ "special": true
1434
+ },
1435
+ "128179": {
1436
+ "content": "<|reserved_special_token_171|>",
1437
+ "lstrip": false,
1438
+ "normalized": false,
1439
+ "rstrip": false,
1440
+ "single_word": false,
1441
+ "special": true
1442
+ },
1443
+ "128180": {
1444
+ "content": "<|reserved_special_token_172|>",
1445
+ "lstrip": false,
1446
+ "normalized": false,
1447
+ "rstrip": false,
1448
+ "single_word": false,
1449
+ "special": true
1450
+ },
1451
+ "128181": {
1452
+ "content": "<|reserved_special_token_173|>",
1453
+ "lstrip": false,
1454
+ "normalized": false,
1455
+ "rstrip": false,
1456
+ "single_word": false,
1457
+ "special": true
1458
+ },
1459
+ "128182": {
1460
+ "content": "<|reserved_special_token_174|>",
1461
+ "lstrip": false,
1462
+ "normalized": false,
1463
+ "rstrip": false,
1464
+ "single_word": false,
1465
+ "special": true
1466
+ },
1467
+ "128183": {
1468
+ "content": "<|reserved_special_token_175|>",
1469
+ "lstrip": false,
1470
+ "normalized": false,
1471
+ "rstrip": false,
1472
+ "single_word": false,
1473
+ "special": true
1474
+ },
1475
+ "128184": {
1476
+ "content": "<|reserved_special_token_176|>",
1477
+ "lstrip": false,
1478
+ "normalized": false,
1479
+ "rstrip": false,
1480
+ "single_word": false,
1481
+ "special": true
1482
+ },
1483
+ "128185": {
1484
+ "content": "<|reserved_special_token_177|>",
1485
+ "lstrip": false,
1486
+ "normalized": false,
1487
+ "rstrip": false,
1488
+ "single_word": false,
1489
+ "special": true
1490
+ },
1491
+ "128186": {
1492
+ "content": "<|reserved_special_token_178|>",
1493
+ "lstrip": false,
1494
+ "normalized": false,
1495
+ "rstrip": false,
1496
+ "single_word": false,
1497
+ "special": true
1498
+ },
1499
+ "128187": {
1500
+ "content": "<|reserved_special_token_179|>",
1501
+ "lstrip": false,
1502
+ "normalized": false,
1503
+ "rstrip": false,
1504
+ "single_word": false,
1505
+ "special": true
1506
+ },
1507
+ "128188": {
1508
+ "content": "<|reserved_special_token_180|>",
1509
+ "lstrip": false,
1510
+ "normalized": false,
1511
+ "rstrip": false,
1512
+ "single_word": false,
1513
+ "special": true
1514
+ },
1515
+ "128189": {
1516
+ "content": "<|reserved_special_token_181|>",
1517
+ "lstrip": false,
1518
+ "normalized": false,
1519
+ "rstrip": false,
1520
+ "single_word": false,
1521
+ "special": true
1522
+ },
1523
+ "128190": {
1524
+ "content": "<|reserved_special_token_182|>",
1525
+ "lstrip": false,
1526
+ "normalized": false,
1527
+ "rstrip": false,
1528
+ "single_word": false,
1529
+ "special": true
1530
+ },
1531
+ "128191": {
1532
+ "content": "<|reserved_special_token_183|>",
1533
+ "lstrip": false,
1534
+ "normalized": false,
1535
+ "rstrip": false,
1536
+ "single_word": false,
1537
+ "special": true
1538
+ },
1539
+ "128192": {
1540
+ "content": "<|reserved_special_token_184|>",
1541
+ "lstrip": false,
1542
+ "normalized": false,
1543
+ "rstrip": false,
1544
+ "single_word": false,
1545
+ "special": true
1546
+ },
1547
+ "128193": {
1548
+ "content": "<|reserved_special_token_185|>",
1549
+ "lstrip": false,
1550
+ "normalized": false,
1551
+ "rstrip": false,
1552
+ "single_word": false,
1553
+ "special": true
1554
+ },
1555
+ "128194": {
1556
+ "content": "<|reserved_special_token_186|>",
1557
+ "lstrip": false,
1558
+ "normalized": false,
1559
+ "rstrip": false,
1560
+ "single_word": false,
1561
+ "special": true
1562
+ },
1563
+ "128195": {
1564
+ "content": "<|reserved_special_token_187|>",
1565
+ "lstrip": false,
1566
+ "normalized": false,
1567
+ "rstrip": false,
1568
+ "single_word": false,
1569
+ "special": true
1570
+ },
1571
+ "128196": {
1572
+ "content": "<|reserved_special_token_188|>",
1573
+ "lstrip": false,
1574
+ "normalized": false,
1575
+ "rstrip": false,
1576
+ "single_word": false,
1577
+ "special": true
1578
+ },
1579
+ "128197": {
1580
+ "content": "<|reserved_special_token_189|>",
1581
+ "lstrip": false,
1582
+ "normalized": false,
1583
+ "rstrip": false,
1584
+ "single_word": false,
1585
+ "special": true
1586
+ },
1587
+ "128198": {
1588
+ "content": "<|reserved_special_token_190|>",
1589
+ "lstrip": false,
1590
+ "normalized": false,
1591
+ "rstrip": false,
1592
+ "single_word": false,
1593
+ "special": true
1594
+ },
1595
+ "128199": {
1596
+ "content": "<|reserved_special_token_191|>",
1597
+ "lstrip": false,
1598
+ "normalized": false,
1599
+ "rstrip": false,
1600
+ "single_word": false,
1601
+ "special": true
1602
+ },
1603
+ "128200": {
1604
+ "content": "<|reserved_special_token_192|>",
1605
+ "lstrip": false,
1606
+ "normalized": false,
1607
+ "rstrip": false,
1608
+ "single_word": false,
1609
+ "special": true
1610
+ },
1611
+ "128201": {
1612
+ "content": "<|reserved_special_token_193|>",
1613
+ "lstrip": false,
1614
+ "normalized": false,
1615
+ "rstrip": false,
1616
+ "single_word": false,
1617
+ "special": true
1618
+ },
1619
+ "128202": {
1620
+ "content": "<|reserved_special_token_194|>",
1621
+ "lstrip": false,
1622
+ "normalized": false,
1623
+ "rstrip": false,
1624
+ "single_word": false,
1625
+ "special": true
1626
+ },
1627
+ "128203": {
1628
+ "content": "<|reserved_special_token_195|>",
1629
+ "lstrip": false,
1630
+ "normalized": false,
1631
+ "rstrip": false,
1632
+ "single_word": false,
1633
+ "special": true
1634
+ },
1635
+ "128204": {
1636
+ "content": "<|reserved_special_token_196|>",
1637
+ "lstrip": false,
1638
+ "normalized": false,
1639
+ "rstrip": false,
1640
+ "single_word": false,
1641
+ "special": true
1642
+ },
1643
+ "128205": {
1644
+ "content": "<|reserved_special_token_197|>",
1645
+ "lstrip": false,
1646
+ "normalized": false,
1647
+ "rstrip": false,
1648
+ "single_word": false,
1649
+ "special": true
1650
+ },
1651
+ "128206": {
1652
+ "content": "<|reserved_special_token_198|>",
1653
+ "lstrip": false,
1654
+ "normalized": false,
1655
+ "rstrip": false,
1656
+ "single_word": false,
1657
+ "special": true
1658
+ },
1659
+ "128207": {
1660
+ "content": "<|reserved_special_token_199|>",
1661
+ "lstrip": false,
1662
+ "normalized": false,
1663
+ "rstrip": false,
1664
+ "single_word": false,
1665
+ "special": true
1666
+ },
1667
+ "128208": {
1668
+ "content": "<|reserved_special_token_200|>",
1669
+ "lstrip": false,
1670
+ "normalized": false,
1671
+ "rstrip": false,
1672
+ "single_word": false,
1673
+ "special": true
1674
+ },
1675
+ "128209": {
1676
+ "content": "<|reserved_special_token_201|>",
1677
+ "lstrip": false,
1678
+ "normalized": false,
1679
+ "rstrip": false,
1680
+ "single_word": false,
1681
+ "special": true
1682
+ },
1683
+ "128210": {
1684
+ "content": "<|reserved_special_token_202|>",
1685
+ "lstrip": false,
1686
+ "normalized": false,
1687
+ "rstrip": false,
1688
+ "single_word": false,
1689
+ "special": true
1690
+ },
1691
+ "128211": {
1692
+ "content": "<|reserved_special_token_203|>",
1693
+ "lstrip": false,
1694
+ "normalized": false,
1695
+ "rstrip": false,
1696
+ "single_word": false,
1697
+ "special": true
1698
+ },
1699
+ "128212": {
1700
+ "content": "<|reserved_special_token_204|>",
1701
+ "lstrip": false,
1702
+ "normalized": false,
1703
+ "rstrip": false,
1704
+ "single_word": false,
1705
+ "special": true
1706
+ },
1707
+ "128213": {
1708
+ "content": "<|reserved_special_token_205|>",
1709
+ "lstrip": false,
1710
+ "normalized": false,
1711
+ "rstrip": false,
1712
+ "single_word": false,
1713
+ "special": true
1714
+ },
1715
+ "128214": {
1716
+ "content": "<|reserved_special_token_206|>",
1717
+ "lstrip": false,
1718
+ "normalized": false,
1719
+ "rstrip": false,
1720
+ "single_word": false,
1721
+ "special": true
1722
+ },
1723
+ "128215": {
1724
+ "content": "<|reserved_special_token_207|>",
1725
+ "lstrip": false,
1726
+ "normalized": false,
1727
+ "rstrip": false,
1728
+ "single_word": false,
1729
+ "special": true
1730
+ },
1731
+ "128216": {
1732
+ "content": "<|reserved_special_token_208|>",
1733
+ "lstrip": false,
1734
+ "normalized": false,
1735
+ "rstrip": false,
1736
+ "single_word": false,
1737
+ "special": true
1738
+ },
1739
+ "128217": {
1740
+ "content": "<|reserved_special_token_209|>",
1741
+ "lstrip": false,
1742
+ "normalized": false,
1743
+ "rstrip": false,
1744
+ "single_word": false,
1745
+ "special": true
1746
+ },
1747
+ "128218": {
1748
+ "content": "<|reserved_special_token_210|>",
1749
+ "lstrip": false,
1750
+ "normalized": false,
1751
+ "rstrip": false,
1752
+ "single_word": false,
1753
+ "special": true
1754
+ },
1755
+ "128219": {
1756
+ "content": "<|reserved_special_token_211|>",
1757
+ "lstrip": false,
1758
+ "normalized": false,
1759
+ "rstrip": false,
1760
+ "single_word": false,
1761
+ "special": true
1762
+ },
1763
+ "128220": {
1764
+ "content": "<|reserved_special_token_212|>",
1765
+ "lstrip": false,
1766
+ "normalized": false,
1767
+ "rstrip": false,
1768
+ "single_word": false,
1769
+ "special": true
1770
+ },
1771
+ "128221": {
1772
+ "content": "<|reserved_special_token_213|>",
1773
+ "lstrip": false,
1774
+ "normalized": false,
1775
+ "rstrip": false,
1776
+ "single_word": false,
1777
+ "special": true
1778
+ },
1779
+ "128222": {
1780
+ "content": "<|reserved_special_token_214|>",
1781
+ "lstrip": false,
1782
+ "normalized": false,
1783
+ "rstrip": false,
1784
+ "single_word": false,
1785
+ "special": true
1786
+ },
1787
+ "128223": {
1788
+ "content": "<|reserved_special_token_215|>",
1789
+ "lstrip": false,
1790
+ "normalized": false,
1791
+ "rstrip": false,
1792
+ "single_word": false,
1793
+ "special": true
1794
+ },
1795
+ "128224": {
1796
+ "content": "<|reserved_special_token_216|>",
1797
+ "lstrip": false,
1798
+ "normalized": false,
1799
+ "rstrip": false,
1800
+ "single_word": false,
1801
+ "special": true
1802
+ },
1803
+ "128225": {
1804
+ "content": "<|reserved_special_token_217|>",
1805
+ "lstrip": false,
1806
+ "normalized": false,
1807
+ "rstrip": false,
1808
+ "single_word": false,
1809
+ "special": true
1810
+ },
1811
+ "128226": {
1812
+ "content": "<|reserved_special_token_218|>",
1813
+ "lstrip": false,
1814
+ "normalized": false,
1815
+ "rstrip": false,
1816
+ "single_word": false,
1817
+ "special": true
1818
+ },
1819
+ "128227": {
1820
+ "content": "<|reserved_special_token_219|>",
1821
+ "lstrip": false,
1822
+ "normalized": false,
1823
+ "rstrip": false,
1824
+ "single_word": false,
1825
+ "special": true
1826
+ },
1827
+ "128228": {
1828
+ "content": "<|reserved_special_token_220|>",
1829
+ "lstrip": false,
1830
+ "normalized": false,
1831
+ "rstrip": false,
1832
+ "single_word": false,
1833
+ "special": true
1834
+ },
1835
+ "128229": {
1836
+ "content": "<|reserved_special_token_221|>",
1837
+ "lstrip": false,
1838
+ "normalized": false,
1839
+ "rstrip": false,
1840
+ "single_word": false,
1841
+ "special": true
1842
+ },
1843
+ "128230": {
1844
+ "content": "<|reserved_special_token_222|>",
1845
+ "lstrip": false,
1846
+ "normalized": false,
1847
+ "rstrip": false,
1848
+ "single_word": false,
1849
+ "special": true
1850
+ },
1851
+ "128231": {
1852
+ "content": "<|reserved_special_token_223|>",
1853
+ "lstrip": false,
1854
+ "normalized": false,
1855
+ "rstrip": false,
1856
+ "single_word": false,
1857
+ "special": true
1858
+ },
1859
+ "128232": {
1860
+ "content": "<|reserved_special_token_224|>",
1861
+ "lstrip": false,
1862
+ "normalized": false,
1863
+ "rstrip": false,
1864
+ "single_word": false,
1865
+ "special": true
1866
+ },
1867
+ "128233": {
1868
+ "content": "<|reserved_special_token_225|>",
1869
+ "lstrip": false,
1870
+ "normalized": false,
1871
+ "rstrip": false,
1872
+ "single_word": false,
1873
+ "special": true
1874
+ },
1875
+ "128234": {
1876
+ "content": "<|reserved_special_token_226|>",
1877
+ "lstrip": false,
1878
+ "normalized": false,
1879
+ "rstrip": false,
1880
+ "single_word": false,
1881
+ "special": true
1882
+ },
1883
+ "128235": {
1884
+ "content": "<|reserved_special_token_227|>",
1885
+ "lstrip": false,
1886
+ "normalized": false,
1887
+ "rstrip": false,
1888
+ "single_word": false,
1889
+ "special": true
1890
+ },
1891
+ "128236": {
1892
+ "content": "<|reserved_special_token_228|>",
1893
+ "lstrip": false,
1894
+ "normalized": false,
1895
+ "rstrip": false,
1896
+ "single_word": false,
1897
+ "special": true
1898
+ },
1899
+ "128237": {
1900
+ "content": "<|reserved_special_token_229|>",
1901
+ "lstrip": false,
1902
+ "normalized": false,
1903
+ "rstrip": false,
1904
+ "single_word": false,
1905
+ "special": true
1906
+ },
1907
+ "128238": {
1908
+ "content": "<|reserved_special_token_230|>",
1909
+ "lstrip": false,
1910
+ "normalized": false,
1911
+ "rstrip": false,
1912
+ "single_word": false,
1913
+ "special": true
1914
+ },
1915
+ "128239": {
1916
+ "content": "<|reserved_special_token_231|>",
1917
+ "lstrip": false,
1918
+ "normalized": false,
1919
+ "rstrip": false,
1920
+ "single_word": false,
1921
+ "special": true
1922
+ },
1923
+ "128240": {
1924
+ "content": "<|reserved_special_token_232|>",
1925
+ "lstrip": false,
1926
+ "normalized": false,
1927
+ "rstrip": false,
1928
+ "single_word": false,
1929
+ "special": true
1930
+ },
1931
+ "128241": {
1932
+ "content": "<|reserved_special_token_233|>",
1933
+ "lstrip": false,
1934
+ "normalized": false,
1935
+ "rstrip": false,
1936
+ "single_word": false,
1937
+ "special": true
1938
+ },
1939
+ "128242": {
1940
+ "content": "<|reserved_special_token_234|>",
1941
+ "lstrip": false,
1942
+ "normalized": false,
1943
+ "rstrip": false,
1944
+ "single_word": false,
1945
+ "special": true
1946
+ },
1947
+ "128243": {
1948
+ "content": "<|reserved_special_token_235|>",
1949
+ "lstrip": false,
1950
+ "normalized": false,
1951
+ "rstrip": false,
1952
+ "single_word": false,
1953
+ "special": true
1954
+ },
1955
+ "128244": {
1956
+ "content": "<|reserved_special_token_236|>",
1957
+ "lstrip": false,
1958
+ "normalized": false,
1959
+ "rstrip": false,
1960
+ "single_word": false,
1961
+ "special": true
1962
+ },
1963
+ "128245": {
1964
+ "content": "<|reserved_special_token_237|>",
1965
+ "lstrip": false,
1966
+ "normalized": false,
1967
+ "rstrip": false,
1968
+ "single_word": false,
1969
+ "special": true
1970
+ },
1971
+ "128246": {
1972
+ "content": "<|reserved_special_token_238|>",
1973
+ "lstrip": false,
1974
+ "normalized": false,
1975
+ "rstrip": false,
1976
+ "single_word": false,
1977
+ "special": true
1978
+ },
1979
+ "128247": {
1980
+ "content": "<|reserved_special_token_239|>",
1981
+ "lstrip": false,
1982
+ "normalized": false,
1983
+ "rstrip": false,
1984
+ "single_word": false,
1985
+ "special": true
1986
+ },
1987
+ "128248": {
1988
+ "content": "<|reserved_special_token_240|>",
1989
+ "lstrip": false,
1990
+ "normalized": false,
1991
+ "rstrip": false,
1992
+ "single_word": false,
1993
+ "special": true
1994
+ },
1995
+ "128249": {
1996
+ "content": "<|reserved_special_token_241|>",
1997
+ "lstrip": false,
1998
+ "normalized": false,
1999
+ "rstrip": false,
2000
+ "single_word": false,
2001
+ "special": true
2002
+ },
2003
+ "128250": {
2004
+ "content": "<|reserved_special_token_242|>",
2005
+ "lstrip": false,
2006
+ "normalized": false,
2007
+ "rstrip": false,
2008
+ "single_word": false,
2009
+ "special": true
2010
+ },
2011
+ "128251": {
2012
+ "content": "<|reserved_special_token_243|>",
2013
+ "lstrip": false,
2014
+ "normalized": false,
2015
+ "rstrip": false,
2016
+ "single_word": false,
2017
+ "special": true
2018
+ },
2019
+ "128252": {
2020
+ "content": "<|reserved_special_token_244|>",
2021
+ "lstrip": false,
2022
+ "normalized": false,
2023
+ "rstrip": false,
2024
+ "single_word": false,
2025
+ "special": true
2026
+ },
2027
+ "128253": {
2028
+ "content": "<|reserved_special_token_245|>",
2029
+ "lstrip": false,
2030
+ "normalized": false,
2031
+ "rstrip": false,
2032
+ "single_word": false,
2033
+ "special": true
2034
+ },
2035
+ "128254": {
2036
+ "content": "<|reserved_special_token_246|>",
2037
+ "lstrip": false,
2038
+ "normalized": false,
2039
+ "rstrip": false,
2040
+ "single_word": false,
2041
+ "special": true
2042
+ },
2043
+ "128255": {
2044
+ "content": "<|reserved_special_token_247|>",
2045
+ "lstrip": false,
2046
+ "normalized": false,
2047
+ "rstrip": false,
2048
+ "single_word": false,
2049
+ "special": true
2050
+ }
2051
+ },
2052
+ "additional_special_tokens": [
2053
+ "<|eot_id|>",
2054
+ "<|eom_id|>"
2055
+ ],
2056
+ "bos_token": "<|begin_of_text|>",
2057
+ "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- Extract system message #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content'] | trim %}\n {%- set messages = messages[1:] %}\n {{- system_message + \"\\n\" }}\n{%- else %}\n {%- set system_message = \"You are a helpful assistant that can use tools. You are developed by Salesforce xLAM team.\" %}\n {% set format_instruction %}You have access to a set of tools. When using tools, make calls in a single JSON array: \n\n[{\"name\": \"tool_call_name\", \"arguments\": {\"arg1\": \"value1\", \"arg2\": \"value2\"}}, ... (additional parallel tool calls as needed)]\n\nIf no tool is suitable, state that explicitly. If the user's input lacks required parameters, ask for clarification. Do not interpret or respond until tool results are returned. Once they are available, process them or make additional calls if needed. For tasks that don't require tools, such as casual conversation or general advice, respond directly in plain text. 
The available tools are:{% endset %}\n {{- system_message + \"\\n\" }}\n {%- if tools is not none %}\n {{- format_instruction + \"\\n\\n\" }}\n {%- endif %}\n{%- endif %}\n\n\n{%- if tools is not none %}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n{%- endif %}\n{{- \"<|eot_id|>\" }}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n {%- elif 'tool_calls' in message %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {%- if message['tool_calls'] %}\n {{- \"[\" }}\n {%- for tool_call_function in message.tool_calls %}\n {%- set tool_call = tool_call_function.function %}\n {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n {{- '\"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- \"}\" }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \"]\" }}\n {{- \"<|eot_id|>\" }}\n {%- elif message['content'] %}\n {{- message['content'] | trim + '<|eot_id|>' }}\n {%- else %}\n {{- \"[]\\n\" + '<|eot_id|>' }}\n {%- endif %}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|start_header_id|>\" + \"ipython\" + \"<|end_header_id|>\\n\\n\" }}\n {%- set content = message[\"content\"] %}\n {%- if content is mapping or (content is iterable and content is not string) %}\n {{- content | tojson }}\n {%- else %}\n {{- content }}\n {%- endif %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}",
2058
+ "clean_up_tokenization_spaces": true,
2059
+ "eos_token": "<|eot_id|>",
2060
+ "extra_special_tokens": {},
2061
+ "model_input_names": [
2062
+ "input_ids",
2063
+ "attention_mask"
2064
+ ],
2065
+ "model_max_length": 16384,
2066
+ "pad_token": "<|eot_id|>",
2067
+ "padding_side": "right",
2068
+ "split_special_tokens": false,
2069
+ "tokenizer_class": "PreTrainedTokenizerFast"
2070
+ }
checkpoint-1026/trainer_state.json ADDED
@@ -0,0 +1,795 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "best_metric": 0.04268278926610947,
3
+ "best_model_checkpoint": "./xlam_lora_new_ete_over_size_3epoch_multi_full_eng/checkpoint-1026",
4
+ "epoch": 1.9940479805648343,
5
+ "eval_steps": 171,
6
+ "global_step": 1026,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.019435165502581234,
13
+ "grad_norm": 0.2028260976076126,
14
+ "learning_rate": 3.205128205128205e-06,
15
+ "loss": 0.0883,
16
+ "step": 10
17
+ },
18
+ {
19
+ "epoch": 0.03887033100516247,
20
+ "grad_norm": 0.16199472546577454,
21
+ "learning_rate": 6.41025641025641e-06,
22
+ "loss": 0.0862,
23
+ "step": 20
24
+ },
25
+ {
26
+ "epoch": 0.0583054965077437,
27
+ "grad_norm": 0.10014229267835617,
28
+ "learning_rate": 9.615384615384616e-06,
29
+ "loss": 0.0785,
30
+ "step": 30
31
+ },
32
+ {
33
+ "epoch": 0.07774066201032494,
34
+ "grad_norm": 0.07943801581859589,
35
+ "learning_rate": 1.282051282051282e-05,
36
+ "loss": 0.0772,
37
+ "step": 40
38
+ },
39
+ {
40
+ "epoch": 0.09717582751290617,
41
+ "grad_norm": 0.11369317024946213,
42
+ "learning_rate": 1.602564102564103e-05,
43
+ "loss": 0.0649,
44
+ "step": 50
45
+ },
46
+ {
47
+ "epoch": 0.1166109930154874,
48
+ "grad_norm": 0.10264527052640915,
49
+ "learning_rate": 1.923076923076923e-05,
50
+ "loss": 0.0686,
51
+ "step": 60
52
+ },
53
+ {
54
+ "epoch": 0.13604615851806864,
55
+ "grad_norm": 0.08583803474903107,
56
+ "learning_rate": 2.2435897435897437e-05,
57
+ "loss": 0.0657,
58
+ "step": 70
59
+ },
60
+ {
61
+ "epoch": 0.15548132402064987,
62
+ "grad_norm": 0.09326862543821335,
63
+ "learning_rate": 2.4999884878368972e-05,
64
+ "loss": 0.0605,
65
+ "step": 80
66
+ },
67
+ {
68
+ "epoch": 0.1749164895232311,
69
+ "grad_norm": 0.09622487425804138,
70
+ "learning_rate": 2.4995855843928913e-05,
71
+ "loss": 0.0617,
72
+ "step": 90
73
+ },
74
+ {
75
+ "epoch": 0.19435165502581234,
76
+ "grad_norm": 0.10855123400688171,
77
+ "learning_rate": 2.4986072848240374e-05,
78
+ "loss": 0.0548,
79
+ "step": 100
80
+ },
81
+ {
82
+ "epoch": 0.21378682052839357,
83
+ "grad_norm": 0.12510105967521667,
84
+ "learning_rate": 2.4970540396075083e-05,
85
+ "loss": 0.0545,
86
+ "step": 110
87
+ },
88
+ {
89
+ "epoch": 0.2332219860309748,
90
+ "grad_norm": 0.11628145724534988,
91
+ "learning_rate": 2.494926563965445e-05,
92
+ "loss": 0.0601,
93
+ "step": 120
94
+ },
95
+ {
96
+ "epoch": 0.252657151533556,
97
+ "grad_norm": 0.12050613015890121,
98
+ "learning_rate": 2.4922258375356232e-05,
99
+ "loss": 0.0585,
100
+ "step": 130
101
+ },
102
+ {
103
+ "epoch": 0.2720923170361373,
104
+ "grad_norm": 0.08949285745620728,
105
+ "learning_rate": 2.488953103920354e-05,
106
+ "loss": 0.0477,
107
+ "step": 140
108
+ },
109
+ {
110
+ "epoch": 0.2915274825387185,
111
+ "grad_norm": 0.14706853032112122,
112
+ "learning_rate": 2.4851098701138466e-05,
113
+ "loss": 0.0555,
114
+ "step": 150
115
+ },
116
+ {
117
+ "epoch": 0.31096264804129975,
118
+ "grad_norm": 0.13029147684574127,
119
+ "learning_rate": 2.4806979058082832e-05,
120
+ "loss": 0.0493,
121
+ "step": 160
122
+ },
123
+ {
124
+ "epoch": 0.330397813543881,
125
+ "grad_norm": 0.1321103274822235,
126
+ "learning_rate": 2.475719242578929e-05,
127
+ "loss": 0.0547,
128
+ "step": 170
129
+ },
130
+ {
131
+ "epoch": 0.3323413300941391,
132
+ "eval_loss": 0.054034121334552765,
133
+ "eval_runtime": 172.2024,
134
+ "eval_samples_per_second": 5.029,
135
+ "eval_steps_per_second": 5.029,
136
+ "step": 171
137
+ },
138
+ {
139
+ "epoch": 0.3498329790464622,
140
+ "grad_norm": 0.14103983342647552,
141
+ "learning_rate": 2.470176172948652e-05,
142
+ "loss": 0.0522,
143
+ "step": 180
144
+ },
145
+ {
146
+ "epoch": 0.36926814454904344,
147
+ "grad_norm": 0.14120739698410034,
148
+ "learning_rate": 2.4640712493322894e-05,
149
+ "loss": 0.0501,
150
+ "step": 190
151
+ },
152
+ {
153
+ "epoch": 0.3887033100516247,
154
+ "grad_norm": 0.1301490068435669,
155
+ "learning_rate": 2.4574072828613354e-05,
156
+ "loss": 0.0492,
157
+ "step": 200
158
+ },
159
+ {
160
+ "epoch": 0.4081384755542059,
161
+ "grad_norm": 0.1606156975030899,
162
+ "learning_rate": 2.450187342089502e-05,
163
+ "loss": 0.0488,
164
+ "step": 210
165
+ },
166
+ {
167
+ "epoch": 0.42757364105678713,
168
+ "grad_norm": 0.1623048037290573,
169
+ "learning_rate": 2.442414751579744e-05,
170
+ "loss": 0.0459,
171
+ "step": 220
172
+ },
173
+ {
174
+ "epoch": 0.44700880655936837,
175
+ "grad_norm": 0.1424696147441864,
176
+ "learning_rate": 2.434093090373396e-05,
177
+ "loss": 0.0441,
178
+ "step": 230
179
+ },
180
+ {
181
+ "epoch": 0.4664439720619496,
182
+ "grad_norm": 0.13980570435523987,
183
+ "learning_rate": 2.4252261903421375e-05,
184
+ "loss": 0.0483,
185
+ "step": 240
186
+ },
187
+ {
188
+ "epoch": 0.4858791375645308,
189
+ "grad_norm": 0.17693087458610535,
190
+ "learning_rate": 2.415818134423528e-05,
191
+ "loss": 0.0451,
192
+ "step": 250
193
+ },
194
+ {
195
+ "epoch": 0.505314303067112,
196
+ "grad_norm": 0.18210634589195251,
197
+ "learning_rate": 2.405873254740942e-05,
198
+ "loss": 0.0466,
199
+ "step": 260
200
+ },
201
+ {
202
+ "epoch": 0.5247494685696933,
203
+ "grad_norm": 0.1527709811925888,
204
+ "learning_rate": 2.395396130608756e-05,
205
+ "loss": 0.0488,
206
+ "step": 270
207
+ },
208
+ {
209
+ "epoch": 0.5441846340722746,
210
+ "grad_norm": 0.15795762836933136,
211
+ "learning_rate": 2.3843915864237143e-05,
212
+ "loss": 0.0448,
213
+ "step": 280
214
+ },
215
+ {
216
+ "epoch": 0.5636197995748558,
217
+ "grad_norm": 0.15495383739471436,
218
+ "learning_rate": 2.3728646894434413e-05,
219
+ "loss": 0.047,
220
+ "step": 290
221
+ },
222
+ {
223
+ "epoch": 0.583054965077437,
224
+ "grad_norm": 0.17630279064178467,
225
+ "learning_rate": 2.3608207474531236e-05,
226
+ "loss": 0.0429,
227
+ "step": 300
228
+ },
229
+ {
230
+ "epoch": 0.6024901305800182,
231
+ "grad_norm": 0.17523537576198578,
232
+ "learning_rate": 2.3482653063214356e-05,
233
+ "loss": 0.0473,
234
+ "step": 310
235
+ },
236
+ {
237
+ "epoch": 0.6219252960825995,
238
+ "grad_norm": 0.1995943933725357,
239
+ "learning_rate": 2.3352041474468373e-05,
240
+ "loss": 0.0392,
241
+ "step": 320
242
+ },
243
+ {
244
+ "epoch": 0.6413604615851807,
245
+ "grad_norm": 0.1683543622493744,
246
+ "learning_rate": 2.3216432850954155e-05,
247
+ "loss": 0.0438,
248
+ "step": 330
249
+ },
250
+ {
251
+ "epoch": 0.660795627087762,
252
+ "grad_norm": 0.1374560445547104,
253
+ "learning_rate": 2.307588963631497e-05,
254
+ "loss": 0.0388,
255
+ "step": 340
256
+ },
257
+ {
258
+ "epoch": 0.6646826601882782,
259
+ "eval_loss": 0.048522137105464935,
260
+ "eval_runtime": 174.8794,
261
+ "eval_samples_per_second": 4.952,
262
+ "eval_steps_per_second": 4.952,
263
+ "step": 342
264
+ },
265
+ {
266
+ "epoch": 0.6802307925903431,
267
+ "grad_norm": 0.166055366396904,
268
+ "learning_rate": 2.293047654642309e-05,
269
+ "loss": 0.0469,
270
+ "step": 350
271
+ },
272
+ {
273
+ "epoch": 0.6996659580929244,
274
+ "grad_norm": 0.14119000732898712,
275
+ "learning_rate": 2.2780260539580116e-05,
276
+ "loss": 0.0424,
277
+ "step": 360
278
+ },
279
+ {
280
+ "epoch": 0.7191011235955056,
281
+ "grad_norm": 0.23057888448238373,
282
+ "learning_rate": 2.2625310785684688e-05,
283
+ "loss": 0.0451,
284
+ "step": 370
285
+ },
286
+ {
287
+ "epoch": 0.7385362890980869,
288
+ "grad_norm": 0.1304253786802292,
289
+ "learning_rate": 2.2465698634381892e-05,
290
+ "loss": 0.0434,
291
+ "step": 380
292
+ },
293
+ {
294
+ "epoch": 0.7579714546006681,
295
+ "grad_norm": 0.17952455580234528,
296
+ "learning_rate": 2.2301497582208884e-05,
297
+ "loss": 0.0409,
298
+ "step": 390
299
+ },
300
+ {
301
+ "epoch": 0.7774066201032493,
302
+ "grad_norm": 0.15560920536518097,
303
+ "learning_rate": 2.2132783238751987e-05,
304
+ "loss": 0.0465,
305
+ "step": 400
306
+ },
307
+ {
308
+ "epoch": 0.7968417856058305,
309
+ "grad_norm": 0.12066332250833511,
310
+ "learning_rate": 2.1959633291830785e-05,
311
+ "loss": 0.0433,
312
+ "step": 410
313
+ },
314
+ {
315
+ "epoch": 0.8162769511084118,
316
+ "grad_norm": 0.15932129323482513,
317
+ "learning_rate": 2.1782127471725232e-05,
318
+ "loss": 0.0458,
319
+ "step": 420
320
+ },
321
+ {
322
+ "epoch": 0.835712116610993,
323
+ "grad_norm": 0.19025273621082306,
324
+ "learning_rate": 2.160034751446231e-05,
325
+ "loss": 0.0522,
326
+ "step": 430
327
+ },
328
+ {
329
+ "epoch": 0.8551472821135743,
330
+ "grad_norm": 0.15156866610050201,
331
+ "learning_rate": 2.1414377124179038e-05,
332
+ "loss": 0.0433,
333
+ "step": 440
334
+ },
335
+ {
336
+ "epoch": 0.8745824476161554,
337
+ "grad_norm": 0.1594618409872055,
338
+ "learning_rate": 2.1224301934579293e-05,
339
+ "loss": 0.0434,
340
+ "step": 450
341
+ },
342
+ {
343
+ "epoch": 0.8940176131187367,
344
+ "grad_norm": 0.16077858209609985,
345
+ "learning_rate": 2.1030209469502098e-05,
346
+ "loss": 0.0426,
347
+ "step": 460
348
+ },
349
+ {
350
+ "epoch": 0.9134527786213179,
351
+ "grad_norm": 0.15807421505451202,
352
+ "learning_rate": 2.0832189102619543e-05,
353
+ "loss": 0.0461,
354
+ "step": 470
355
+ },
356
+ {
357
+ "epoch": 0.9328879441238992,
358
+ "grad_norm": 0.20585452020168304,
359
+ "learning_rate": 2.0630332016282927e-05,
360
+ "loss": 0.0429,
361
+ "step": 480
362
+ },
363
+ {
364
+ "epoch": 0.9523231096264804,
365
+ "grad_norm": 0.1859339326620102,
366
+ "learning_rate": 2.0424731159536083e-05,
367
+ "loss": 0.0432,
368
+ "step": 490
369
+ },
370
+ {
371
+ "epoch": 0.9717582751290617,
372
+ "grad_norm": 0.20674780011177063,
373
+ "learning_rate": 2.021548120531516e-05,
374
+ "loss": 0.0392,
375
+ "step": 500
376
+ },
377
+ {
378
+ "epoch": 0.9911934406316428,
379
+ "grad_norm": 0.22314420342445374,
380
+ "learning_rate": 2.0002678506854606e-05,
381
+ "loss": 0.041,
382
+ "step": 510
383
+ },
384
+ {
385
+ "epoch": 0.9970239902824173,
386
+ "eval_loss": 0.04610577970743179,
387
+ "eval_runtime": 175.1913,
388
+ "eval_samples_per_second": 4.943,
389
+ "eval_steps_per_second": 4.943,
390
+ "step": 513
391
+ },
392
+ {
393
+ "epoch": 1.010628606134224,
394
+ "grad_norm": 0.1707136183977127,
395
+ "learning_rate": 1.9786421053319455e-05,
396
+ "loss": 0.0468,
397
+ "step": 520
398
+ },
399
+ {
400
+ "epoch": 1.0300637716368053,
401
+ "grad_norm": 0.18121393024921417,
402
+ "learning_rate": 1.9566808424684284e-05,
403
+ "loss": 0.0361,
404
+ "step": 530
405
+ },
406
+ {
407
+ "epoch": 1.0494989371393866,
408
+ "grad_norm": 0.17815589904785156,
409
+ "learning_rate": 1.9343941745879695e-05,
410
+ "loss": 0.0418,
411
+ "step": 540
412
+ },
413
+ {
414
+ "epoch": 1.0689341026419679,
415
+ "grad_norm": 0.17634443938732147,
416
+ "learning_rate": 1.9117923640227314e-05,
417
+ "loss": 0.0437,
418
+ "step": 550
419
+ },
420
+ {
421
+ "epoch": 1.088369268144549,
422
+ "grad_norm": 0.18102432787418365,
423
+ "learning_rate": 1.8888858182184925e-05,
424
+ "loss": 0.0426,
425
+ "step": 560
426
+ },
427
+ {
428
+ "epoch": 1.1078044336471302,
429
+ "grad_norm": 0.20596696436405182,
430
+ "learning_rate": 1.865685084942331e-05,
431
+ "loss": 0.0414,
432
+ "step": 570
433
+ },
434
+ {
435
+ "epoch": 1.1272395991497115,
436
+ "grad_norm": 0.2145373523235321,
437
+ "learning_rate": 1.8422008474257013e-05,
438
+ "loss": 0.0354,
439
+ "step": 580
440
+ },
441
+ {
442
+ "epoch": 1.1466747646522928,
443
+ "grad_norm": 0.2385258674621582,
444
+ "learning_rate": 1.8184439194451326e-05,
445
+ "loss": 0.0357,
446
+ "step": 590
447
+ },
448
+ {
449
+ "epoch": 1.166109930154874,
450
+ "grad_norm": 0.23679479956626892,
451
+ "learning_rate": 1.7944252403428097e-05,
452
+ "loss": 0.041,
453
+ "step": 600
454
+ },
455
+ {
456
+ "epoch": 1.1855450956574551,
457
+ "grad_norm": 0.1947854906320572,
458
+ "learning_rate": 1.770155869989343e-05,
459
+ "loss": 0.0407,
460
+ "step": 610
461
+ },
462
+ {
463
+ "epoch": 1.2049802611600364,
464
+ "grad_norm": 0.25392264127731323,
465
+ "learning_rate": 1.7456469836910334e-05,
466
+ "loss": 0.0405,
467
+ "step": 620
468
+ },
469
+ {
470
+ "epoch": 1.2244154266626177,
471
+ "grad_norm": 0.2636931240558624,
472
+ "learning_rate": 1.7209098670439817e-05,
473
+ "loss": 0.0442,
474
+ "step": 630
475
+ },
476
+ {
477
+ "epoch": 1.243850592165199,
478
+ "grad_norm": 0.19380666315555573,
479
+ "learning_rate": 1.695955910737419e-05,
480
+ "loss": 0.0344,
481
+ "step": 640
482
+ },
483
+ {
484
+ "epoch": 1.26328575766778,
485
+ "grad_norm": 0.2423335164785385,
486
+ "learning_rate": 1.670796605308638e-05,
487
+ "loss": 0.0415,
488
+ "step": 650
489
+ },
490
+ {
491
+ "epoch": 1.2827209231703613,
492
+ "grad_norm": 0.20322546362876892,
493
+ "learning_rate": 1.6454435358519524e-05,
494
+ "loss": 0.0381,
495
+ "step": 660
496
+ },
497
+ {
498
+ "epoch": 1.3021560886729426,
499
+ "grad_norm": 0.20570382475852966,
500
+ "learning_rate": 1.6199083766841115e-05,
501
+ "loss": 0.035,
502
+ "step": 670
503
+ },
504
+ {
505
+ "epoch": 1.321591254175524,
506
+ "grad_norm": 0.23237545788288116,
507
+ "learning_rate": 1.594202885968636e-05,
508
+ "loss": 0.0447,
509
+ "step": 680
510
+ },
511
+ {
512
+ "epoch": 1.3293653203765563,
513
+ "eval_loss": 0.0446239709854126,
514
+ "eval_runtime": 174.4977,
515
+ "eval_samples_per_second": 4.963,
516
+ "eval_steps_per_second": 4.963,
517
+ "step": 684
518
+ },
519
+ {
520
+ "epoch": 1.341026419678105,
521
+ "grad_norm": 0.2273474782705307,
522
+ "learning_rate": 1.568338900301536e-05,
523
+ "loss": 0.0406,
524
+ "step": 690
525
+ },
526
+ {
527
+ "epoch": 1.3604615851806863,
528
+ "grad_norm": 0.2105632871389389,
529
+ "learning_rate": 1.5423283292609255e-05,
530
+ "loss": 0.0384,
531
+ "step": 700
532
+ },
533
+ {
534
+ "epoch": 1.3798967506832676,
535
+ "grad_norm": 0.728905200958252,
536
+ "learning_rate": 1.5161831499230197e-05,
537
+ "loss": 0.0385,
538
+ "step": 710
539
+ },
540
+ {
541
+ "epoch": 1.3993319161858488,
542
+ "grad_norm": 0.2697829604148865,
543
+ "learning_rate": 1.4899154013470574e-05,
544
+ "loss": 0.0407,
545
+ "step": 720
546
+ },
547
+ {
548
+ "epoch": 1.4187670816884301,
549
+ "grad_norm": 0.23418137431144714,
550
+ "learning_rate": 1.4635371790316805e-05,
551
+ "loss": 0.0425,
552
+ "step": 730
553
+ },
554
+ {
555
+ "epoch": 1.4382022471910112,
556
+ "grad_norm": 0.21163399517536163,
557
+ "learning_rate": 1.437060629345325e-05,
558
+ "loss": 0.0369,
559
+ "step": 740
560
+ },
561
+ {
562
+ "epoch": 1.4576374126935925,
563
+ "grad_norm": 0.24824394285678864,
564
+ "learning_rate": 1.4104979439331889e-05,
565
+ "loss": 0.0413,
566
+ "step": 750
567
+ },
568
+ {
569
+ "epoch": 1.4770725781961738,
570
+ "grad_norm": 0.24107202887535095,
571
+ "learning_rate": 1.383861354103349e-05,
572
+ "loss": 0.0368,
573
+ "step": 760
574
+ },
575
+ {
576
+ "epoch": 1.4965077436987548,
577
+ "grad_norm": 0.25261572003364563,
578
+ "learning_rate": 1.357163125194618e-05,
579
+ "loss": 0.0437,
580
+ "step": 770
581
+ },
582
+ {
583
+ "epoch": 1.5159429092013361,
584
+ "grad_norm": 0.1909024715423584,
585
+ "learning_rate": 1.3304155509287273e-05,
586
+ "loss": 0.0376,
587
+ "step": 780
588
+ },
589
+ {
590
+ "epoch": 1.5353780747039174,
591
+ "grad_norm": 0.24820125102996826,
592
+ "learning_rate": 1.3036309477494433e-05,
593
+ "loss": 0.038,
594
+ "step": 790
595
+ },
596
+ {
597
+ "epoch": 1.5548132402064985,
598
+ "grad_norm": 0.2151060849428177,
599
+ "learning_rate": 1.27682164915122e-05,
600
+ "loss": 0.0427,
601
+ "step": 800
602
+ },
603
+ {
604
+ "epoch": 1.57424840570908,
605
+ "grad_norm": 0.2735498249530792,
606
+ "learning_rate": 1.25e-05,
607
+ "loss": 0.0405,
608
+ "step": 810
609
+ },
610
+ {
611
+ "epoch": 1.593683571211661,
612
+ "grad_norm": 0.20167022943496704,
613
+ "learning_rate": 1.2231783508487806e-05,
614
+ "loss": 0.0361,
615
+ "step": 820
616
+ },
617
+ {
618
+ "epoch": 1.6131187367142423,
619
+ "grad_norm": 0.29892289638519287,
620
+ "learning_rate": 1.1963690522505568e-05,
621
+ "loss": 0.0343,
622
+ "step": 830
623
+ },
624
+ {
625
+ "epoch": 1.6325539022168236,
626
+ "grad_norm": 0.2181161344051361,
627
+ "learning_rate": 1.169584449071273e-05,
628
+ "loss": 0.044,
629
+ "step": 840
630
+ },
631
+ {
632
+ "epoch": 1.6519890677194047,
633
+ "grad_norm": 0.24029949307441711,
634
+ "learning_rate": 1.142836874805382e-05,
635
+ "loss": 0.0347,
636
+ "step": 850
637
+ },
638
+ {
639
+ "epoch": 1.6617066504706954,
640
+ "eval_loss": 0.04329855740070343,
641
+ "eval_runtime": 170.3718,
642
+ "eval_samples_per_second": 5.083,
643
+ "eval_steps_per_second": 5.083,
644
+ "step": 855
645
+ },
646
+ {
647
+ "epoch": 1.6714242332219862,
648
+ "grad_norm": 0.23985505104064941,
649
+ "learning_rate": 1.1161386458966511e-05,
650
+ "loss": 0.0401,
651
+ "step": 860
652
+ },
653
+ {
654
+ "epoch": 1.6908593987245673,
655
+ "grad_norm": 0.22673290967941284,
656
+ "learning_rate": 1.0895020560668112e-05,
657
+ "loss": 0.0364,
658
+ "step": 870
659
+ },
660
+ {
661
+ "epoch": 1.7102945642271485,
662
+ "grad_norm": 0.22750601172447205,
663
+ "learning_rate": 1.0629393706546752e-05,
664
+ "loss": 0.039,
665
+ "step": 880
666
+ },
667
+ {
668
+ "epoch": 1.7297297297297298,
669
+ "grad_norm": 0.24846433103084564,
670
+ "learning_rate": 1.03646282096832e-05,
671
+ "loss": 0.0341,
672
+ "step": 890
673
+ },
674
+ {
675
+ "epoch": 1.749164895232311,
676
+ "grad_norm": 0.19867610931396484,
677
+ "learning_rate": 1.0100845986529429e-05,
678
+ "loss": 0.036,
679
+ "step": 900
680
+ },
681
+ {
682
+ "epoch": 1.7686000607348922,
683
+ "grad_norm": 0.2108948975801468,
684
+ "learning_rate": 9.838168500769806e-06,
685
+ "loss": 0.0359,
686
+ "step": 910
687
+ },
688
+ {
689
+ "epoch": 1.7880352262374735,
690
+ "grad_norm": 0.22352129220962524,
691
+ "learning_rate": 9.576716707390745e-06,
692
+ "loss": 0.0432,
693
+ "step": 920
694
+ },
695
+ {
696
+ "epoch": 1.8074703917400545,
697
+ "grad_norm": 0.2634281516075134,
698
+ "learning_rate": 9.316610996984642e-06,
699
+ "loss": 0.0371,
700
+ "step": 930
701
+ },
702
+ {
703
+ "epoch": 1.826905557242636,
704
+ "grad_norm": 0.261662095785141,
705
+ "learning_rate": 9.057971140313645e-06,
706
+ "loss": 0.0414,
707
+ "step": 940
708
+ },
709
+ {
710
+ "epoch": 1.846340722745217,
711
+ "grad_norm": 0.269505113363266,
712
+ "learning_rate": 8.800916233158885e-06,
713
+ "loss": 0.033,
714
+ "step": 950
715
+ },
716
+ {
717
+ "epoch": 1.8657758882477984,
718
+ "grad_norm": 0.2583780288696289,
719
+ "learning_rate": 8.545564641480484e-06,
720
+ "loss": 0.04,
721
+ "step": 960
722
+ },
723
+ {
724
+ "epoch": 1.8852110537503797,
725
+ "grad_norm": 0.2607627213001251,
726
+ "learning_rate": 8.292033946913624e-06,
727
+ "loss": 0.0354,
728
+ "step": 970
729
+ },
730
+ {
731
+ "epoch": 1.9046462192529607,
732
+ "grad_norm": 0.23419621586799622,
733
+ "learning_rate": 8.040440892625817e-06,
734
+ "loss": 0.0362,
735
+ "step": 980
736
+ },
737
+ {
738
+ "epoch": 1.924081384755542,
739
+ "grad_norm": 0.2785259783267975,
740
+ "learning_rate": 7.790901329560186e-06,
741
+ "loss": 0.0367,
742
+ "step": 990
743
+ },
744
+ {
745
+ "epoch": 1.9435165502581233,
746
+ "grad_norm": 0.2678421437740326,
747
+ "learning_rate": 7.543530163089671e-06,
748
+ "loss": 0.0363,
749
+ "step": 1000
750
+ },
751
+ {
752
+ "epoch": 1.9629517157607044,
753
+ "grad_norm": 0.2274320125579834,
754
+ "learning_rate": 7.29844130010657e-06,
755
+ "loss": 0.0395,
756
+ "step": 1010
757
+ },
758
+ {
759
+ "epoch": 1.9823868812632859,
760
+ "grad_norm": 0.2792135179042816,
761
+ "learning_rate": 7.055747596571904e-06,
762
+ "loss": 0.0374,
763
+ "step": 1020
764
+ },
765
+ {
766
+ "epoch": 1.9940479805648343,
767
+ "eval_loss": 0.04268278926610947,
768
+ "eval_runtime": 169.855,
769
+ "eval_samples_per_second": 5.098,
770
+ "eval_steps_per_second": 5.098,
771
+ "step": 1026
772
+ }
773
+ ],
774
+ "logging_steps": 10,
775
+ "max_steps": 1542,
776
+ "num_input_tokens_seen": 0,
777
+ "num_train_epochs": 3,
778
+ "save_steps": 171,
779
+ "stateful_callbacks": {
780
+ "TrainerControl": {
781
+ "args": {
782
+ "should_epoch_stop": false,
783
+ "should_evaluate": false,
784
+ "should_log": false,
785
+ "should_save": true,
786
+ "should_training_stop": false
787
+ },
788
+ "attributes": {}
789
+ }
790
+ },
791
+ "total_flos": 7.866128786040422e+17,
792
+ "train_batch_size": 1,
793
+ "trial_name": null,
794
+ "trial_params": null
795
+ }
checkpoint-1026/training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d1d135889b53a4fc2dcc2af42081d4ec6fb88ac6da63713e48b39ec06726249a
3
+ size 5624
checkpoint-1197/README.md ADDED
@@ -0,0 +1,209 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ base_model: Salesforce/Llama-xLAM-2-8b-fc-r
3
+ library_name: peft
4
+ pipeline_tag: text-generation
5
+ tags:
6
+ - base_model:adapter:Salesforce/Llama-xLAM-2-8b-fc-r
7
+ - lora
8
+ - sft
9
+ - transformers
10
+ - trl
11
+ ---
12
+
13
+ # Model Card for Model ID
14
+
15
+ <!-- Provide a quick summary of what the model is/does. -->
16
+
17
+
18
+
19
+ ## Model Details
20
+
21
+ ### Model Description
22
+
23
+ <!-- Provide a longer summary of what this model is. -->
24
+
25
+
26
+
27
+ - **Developed by:** [More Information Needed]
28
+ - **Funded by [optional]:** [More Information Needed]
29
+ - **Shared by [optional]:** [More Information Needed]
30
+ - **Model type:** [More Information Needed]
31
+ - **Language(s) (NLP):** [More Information Needed]
32
+ - **License:** [More Information Needed]
33
+ - **Finetuned from model [optional]:** [More Information Needed]
34
+
35
+ ### Model Sources [optional]
36
+
37
+ <!-- Provide the basic links for the model. -->
38
+
39
+ - **Repository:** [More Information Needed]
40
+ - **Paper [optional]:** [More Information Needed]
41
+ - **Demo [optional]:** [More Information Needed]
42
+
43
+ ## Uses
44
+
45
+ <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->
46
+
47
+ ### Direct Use
48
+
49
+ <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->
50
+
51
+ [More Information Needed]
52
+
53
+ ### Downstream Use [optional]
54
+
55
+ <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->
56
+
57
+ [More Information Needed]
58
+
59
+ ### Out-of-Scope Use
60
+
61
+ <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->
62
+
63
+ [More Information Needed]
64
+
65
+ ## Bias, Risks, and Limitations
66
+
67
+ <!-- This section is meant to convey both technical and sociotechnical limitations. -->
68
+
69
+ [More Information Needed]
70
+
71
+ ### Recommendations
72
+
73
+ <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
74
+
75
+ Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
76
+
77
+ ## How to Get Started with the Model
78
+
79
+ Use the code below to get started with the model.
80
+
81
+ [More Information Needed]
82
+
83
+ ## Training Details
84
+
85
+ ### Training Data
86
+
87
+ <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->
88
+
89
+ [More Information Needed]
90
+
91
+ ### Training Procedure
92
+
93
+ <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->
94
+
95
+ #### Preprocessing [optional]
96
+
97
+ [More Information Needed]
98
+
99
+
100
+ #### Training Hyperparameters
101
+
102
+ - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->
103
+
104
+ #### Speeds, Sizes, Times [optional]
105
+
106
+ <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->
107
+
108
+ [More Information Needed]
109
+
110
+ ## Evaluation
111
+
112
+ <!-- This section describes the evaluation protocols and provides the results. -->
113
+
114
+ ### Testing Data, Factors & Metrics
115
+
116
+ #### Testing Data
117
+
118
+ <!-- This should link to a Dataset Card if possible. -->
119
+
120
+ [More Information Needed]
121
+
122
+ #### Factors
123
+
124
+ <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->
125
+
126
+ [More Information Needed]
127
+
128
+ #### Metrics
129
+
130
+ <!-- These are the evaluation metrics being used, ideally with a description of why. -->
131
+
132
+ [More Information Needed]
133
+
134
+ ### Results
135
+
136
+ [More Information Needed]
137
+
138
+ #### Summary
139
+
140
+
141
+
142
+ ## Model Examination [optional]
143
+
144
+ <!-- Relevant interpretability work for the model goes here -->
145
+
146
+ [More Information Needed]
147
+
148
+ ## Environmental Impact
149
+
150
+ <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->
151
+
152
+ Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
153
+
154
+ - **Hardware Type:** [More Information Needed]
155
+ - **Hours used:** [More Information Needed]
156
+ - **Cloud Provider:** [More Information Needed]
157
+ - **Compute Region:** [More Information Needed]
158
+ - **Carbon Emitted:** [More Information Needed]
159
+
160
+ ## Technical Specifications [optional]
161
+
162
+ ### Model Architecture and Objective
163
+
164
+ [More Information Needed]
165
+
166
+ ### Compute Infrastructure
167
+
168
+ [More Information Needed]
169
+
170
+ #### Hardware
171
+
172
+ [More Information Needed]
173
+
174
+ #### Software
175
+
176
+ [More Information Needed]
177
+
178
+ ## Citation [optional]
179
+
180
+ <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->
181
+
182
+ **BibTeX:**
183
+
184
+ [More Information Needed]
185
+
186
+ **APA:**
187
+
188
+ [More Information Needed]
189
+
190
+ ## Glossary [optional]
191
+
192
+ <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->
193
+
194
+ [More Information Needed]
195
+
196
+ ## More Information [optional]
197
+
198
+ [More Information Needed]
199
+
200
+ ## Model Card Authors [optional]
201
+
202
+ [More Information Needed]
203
+
204
+ ## Model Card Contact
205
+
206
+ [More Information Needed]
207
+ ### Framework versions
208
+
209
+ - PEFT 0.17.1
checkpoint-1197/adapter_config.json ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "alpha_pattern": {},
3
+ "auto_mapping": null,
4
+ "base_model_name_or_path": "Salesforce/Llama-xLAM-2-8b-fc-r",
5
+ "bias": "none",
6
+ "corda_config": null,
7
+ "eva_config": null,
8
+ "exclude_modules": null,
9
+ "fan_in_fan_out": false,
10
+ "inference_mode": true,
11
+ "init_lora_weights": true,
12
+ "layer_replication": null,
13
+ "layers_pattern": null,
14
+ "layers_to_transform": null,
15
+ "loftq_config": {},
16
+ "lora_alpha": 32,
17
+ "lora_bias": false,
18
+ "lora_dropout": 0.05,
19
+ "megatron_config": null,
20
+ "megatron_core": "megatron.core",
21
+ "modules_to_save": null,
22
+ "peft_type": "LORA",
23
+ "qalora_group_size": 16,
24
+ "r": 16,
25
+ "rank_pattern": {},
26
+ "revision": null,
27
+ "target_modules": [
28
+ "o_proj",
29
+ "k_proj",
30
+ "q_proj",
31
+ "v_proj"
32
+ ],
33
+ "target_parameters": null,
34
+ "task_type": "CAUSAL_LM",
35
+ "trainable_token_indices": null,
36
+ "use_dora": false,
37
+ "use_qalora": false,
38
+ "use_rslora": false
39
+ }
checkpoint-1197/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d4fac2d2f50b8a56b0ebd6b1c4fa8bebc67a950ea3150bfb73fedcb34157d959
3
+ size 54560368
checkpoint-1197/optimizer.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:010b38f8d9dc59d529013582999522f74d32baa2fce4041be2fcd6594e46d5e8
3
+ size 109267450
checkpoint-1197/rng_state.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:952f42b9fe30af71cb9183c093ce66eb71deefe5522a086943e1b65829371475
3
+ size 14244
checkpoint-1197/scheduler.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:08f79aa8e60c53ad93afcf6fad6bb395ff7ae397cb62f500f6cc0c35550bf969
3
+ size 1064
checkpoint-1197/special_tokens_map.json ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "additional_special_tokens": [
3
+ "<|eot_id|>",
4
+ "<|eom_id|>"
5
+ ],
6
+ "bos_token": {
7
+ "content": "<|begin_of_text|>",
8
+ "lstrip": false,
9
+ "normalized": false,
10
+ "rstrip": false,
11
+ "single_word": false
12
+ },
13
+ "eos_token": {
14
+ "content": "<|eot_id|>",
15
+ "lstrip": false,
16
+ "normalized": false,
17
+ "rstrip": false,
18
+ "single_word": false
19
+ },
20
+ "pad_token": {
21
+ "content": "<|eot_id|>",
22
+ "lstrip": false,
23
+ "normalized": false,
24
+ "rstrip": false,
25
+ "single_word": false
26
+ }
27
+ }
checkpoint-1197/tokenizer.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
3
+ size 17209920
checkpoint-1197/tokenizer_config.json ADDED
@@ -0,0 +1,2070 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "added_tokens_decoder": {
3
+ "128000": {
4
+ "content": "<|begin_of_text|>",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "128001": {
12
+ "content": "<|end_of_text|>",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "128002": {
20
+ "content": "<|reserved_special_token_0|>",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "128003": {
28
+ "content": "<|reserved_special_token_1|>",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ },
35
+ "128004": {
36
+ "content": "<|finetune_right_pad_id|>",
37
+ "lstrip": false,
38
+ "normalized": false,
39
+ "rstrip": false,
40
+ "single_word": false,
41
+ "special": true
42
+ },
43
+ "128005": {
44
+ "content": "<|reserved_special_token_2|>",
45
+ "lstrip": false,
46
+ "normalized": false,
47
+ "rstrip": false,
48
+ "single_word": false,
49
+ "special": true
50
+ },
51
+ "128006": {
52
+ "content": "<|start_header_id|>",
53
+ "lstrip": false,
54
+ "normalized": false,
55
+ "rstrip": false,
56
+ "single_word": false,
57
+ "special": true
58
+ },
59
+ "128007": {
60
+ "content": "<|end_header_id|>",
61
+ "lstrip": false,
62
+ "normalized": false,
63
+ "rstrip": false,
64
+ "single_word": false,
65
+ "special": true
66
+ },
67
+ "128008": {
68
+ "content": "<|eom_id|>",
69
+ "lstrip": false,
70
+ "normalized": false,
71
+ "rstrip": false,
72
+ "single_word": false,
73
+ "special": true
74
+ },
75
+ "128009": {
76
+ "content": "<|eot_id|>",
77
+ "lstrip": false,
78
+ "normalized": false,
79
+ "rstrip": false,
80
+ "single_word": false,
81
+ "special": true
82
+ },
83
+ "128010": {
84
+ "content": "<|python_tag|>",
85
+ "lstrip": false,
86
+ "normalized": false,
87
+ "rstrip": false,
88
+ "single_word": false,
89
+ "special": true
90
+ },
91
+ "128011": {
92
+ "content": "<|reserved_special_token_3|>",
93
+ "lstrip": false,
94
+ "normalized": false,
95
+ "rstrip": false,
96
+ "single_word": false,
97
+ "special": true
98
+ },
99
+ "128012": {
100
+ "content": "<|reserved_special_token_4|>",
101
+ "lstrip": false,
102
+ "normalized": false,
103
+ "rstrip": false,
104
+ "single_word": false,
105
+ "special": true
106
+ },
107
+ "128013": {
108
+ "content": "<|reserved_special_token_5|>",
109
+ "lstrip": false,
110
+ "normalized": false,
111
+ "rstrip": false,
112
+ "single_word": false,
113
+ "special": true
114
+ },
115
+ "128014": {
116
+ "content": "<|reserved_special_token_6|>",
117
+ "lstrip": false,
118
+ "normalized": false,
119
+ "rstrip": false,
120
+ "single_word": false,
121
+ "special": true
122
+ },
123
+ "128015": {
124
+ "content": "<|reserved_special_token_7|>",
125
+ "lstrip": false,
126
+ "normalized": false,
127
+ "rstrip": false,
128
+ "single_word": false,
129
+ "special": true
130
+ },
131
+ "128016": {
132
+ "content": "<|reserved_special_token_8|>",
133
+ "lstrip": false,
134
+ "normalized": false,
135
+ "rstrip": false,
136
+ "single_word": false,
137
+ "special": true
138
+ },
139
+ "128017": {
140
+ "content": "<|reserved_special_token_9|>",
141
+ "lstrip": false,
142
+ "normalized": false,
143
+ "rstrip": false,
144
+ "single_word": false,
145
+ "special": true
146
+ },
147
+ "128018": {
148
+ "content": "<|reserved_special_token_10|>",
149
+ "lstrip": false,
150
+ "normalized": false,
151
+ "rstrip": false,
152
+ "single_word": false,
153
+ "special": true
154
+ },
155
+ "128019": {
156
+ "content": "<|reserved_special_token_11|>",
157
+ "lstrip": false,
158
+ "normalized": false,
159
+ "rstrip": false,
160
+ "single_word": false,
161
+ "special": true
162
+ },
163
+ "128020": {
164
+ "content": "<|reserved_special_token_12|>",
165
+ "lstrip": false,
166
+ "normalized": false,
167
+ "rstrip": false,
168
+ "single_word": false,
169
+ "special": true
170
+ },
171
+ "128021": {
172
+ "content": "<|reserved_special_token_13|>",
173
+ "lstrip": false,
174
+ "normalized": false,
175
+ "rstrip": false,
176
+ "single_word": false,
177
+ "special": true
178
+ },
179
+ "128022": {
180
+ "content": "<|reserved_special_token_14|>",
181
+ "lstrip": false,
182
+ "normalized": false,
183
+ "rstrip": false,
184
+ "single_word": false,
185
+ "special": true
186
+ },
187
+ "128023": {
188
+ "content": "<|reserved_special_token_15|>",
189
+ "lstrip": false,
190
+ "normalized": false,
191
+ "rstrip": false,
192
+ "single_word": false,
193
+ "special": true
194
+ },
195
+ "128024": {
196
+ "content": "<|reserved_special_token_16|>",
197
+ "lstrip": false,
198
+ "normalized": false,
199
+ "rstrip": false,
200
+ "single_word": false,
201
+ "special": true
202
+ },
203
+ "128025": {
204
+ "content": "<|reserved_special_token_17|>",
205
+ "lstrip": false,
206
+ "normalized": false,
207
+ "rstrip": false,
208
+ "single_word": false,
209
+ "special": true
210
+ },
211
+ "128026": {
212
+ "content": "<|reserved_special_token_18|>",
213
+ "lstrip": false,
214
+ "normalized": false,
215
+ "rstrip": false,
216
+ "single_word": false,
217
+ "special": true
218
+ },
219
+ "128027": {
220
+ "content": "<|reserved_special_token_19|>",
221
+ "lstrip": false,
222
+ "normalized": false,
223
+ "rstrip": false,
224
+ "single_word": false,
225
+ "special": true
226
+ },
227
+ "128028": {
228
+ "content": "<|reserved_special_token_20|>",
229
+ "lstrip": false,
230
+ "normalized": false,
231
+ "rstrip": false,
232
+ "single_word": false,
233
+ "special": true
234
+ },
235
+ "128029": {
236
+ "content": "<|reserved_special_token_21|>",
237
+ "lstrip": false,
238
+ "normalized": false,
239
+ "rstrip": false,
240
+ "single_word": false,
241
+ "special": true
242
+ },
243
+ "128030": {
244
+ "content": "<|reserved_special_token_22|>",
245
+ "lstrip": false,
246
+ "normalized": false,
247
+ "rstrip": false,
248
+ "single_word": false,
249
+ "special": true
250
+ },
251
+ "128031": {
252
+ "content": "<|reserved_special_token_23|>",
253
+ "lstrip": false,
254
+ "normalized": false,
255
+ "rstrip": false,
256
+ "single_word": false,
257
+ "special": true
258
+ },
259
+ "128032": {
260
+ "content": "<|reserved_special_token_24|>",
261
+ "lstrip": false,
262
+ "normalized": false,
263
+ "rstrip": false,
264
+ "single_word": false,
265
+ "special": true
266
+ },
267
+ "128033": {
268
+ "content": "<|reserved_special_token_25|>",
269
+ "lstrip": false,
270
+ "normalized": false,
271
+ "rstrip": false,
272
+ "single_word": false,
273
+ "special": true
274
+ },
275
+ "128034": {
276
+ "content": "<|reserved_special_token_26|>",
277
+ "lstrip": false,
278
+ "normalized": false,
279
+ "rstrip": false,
280
+ "single_word": false,
281
+ "special": true
282
+ },
283
+ "128035": {
284
+ "content": "<|reserved_special_token_27|>",
285
+ "lstrip": false,
286
+ "normalized": false,
287
+ "rstrip": false,
288
+ "single_word": false,
289
+ "special": true
290
+ },
291
+ "128036": {
292
+ "content": "<|reserved_special_token_28|>",
293
+ "lstrip": false,
294
+ "normalized": false,
295
+ "rstrip": false,
296
+ "single_word": false,
297
+ "special": true
298
+ },
299
+ "128037": {
300
+ "content": "<|reserved_special_token_29|>",
301
+ "lstrip": false,
302
+ "normalized": false,
303
+ "rstrip": false,
304
+ "single_word": false,
305
+ "special": true
306
+ },
307
+ "128038": {
308
+ "content": "<|reserved_special_token_30|>",
309
+ "lstrip": false,
310
+ "normalized": false,
311
+ "rstrip": false,
312
+ "single_word": false,
313
+ "special": true
314
+ },
315
+ "128039": {
316
+ "content": "<|reserved_special_token_31|>",
317
+ "lstrip": false,
318
+ "normalized": false,
319
+ "rstrip": false,
320
+ "single_word": false,
321
+ "special": true
322
+ },
323
+ "128040": {
324
+ "content": "<|reserved_special_token_32|>",
325
+ "lstrip": false,
326
+ "normalized": false,
327
+ "rstrip": false,
328
+ "single_word": false,
329
+ "special": true
330
+ },
331
+ "128041": {
332
+ "content": "<|reserved_special_token_33|>",
333
+ "lstrip": false,
334
+ "normalized": false,
335
+ "rstrip": false,
336
+ "single_word": false,
337
+ "special": true
338
+ },
339
+ "128042": {
340
+ "content": "<|reserved_special_token_34|>",
341
+ "lstrip": false,
342
+ "normalized": false,
343
+ "rstrip": false,
344
+ "single_word": false,
345
+ "special": true
346
+ },
347
+ "128043": {
348
+ "content": "<|reserved_special_token_35|>",
349
+ "lstrip": false,
350
+ "normalized": false,
351
+ "rstrip": false,
352
+ "single_word": false,
353
+ "special": true
354
+ },
355
+ "128044": {
356
+ "content": "<|reserved_special_token_36|>",
357
+ "lstrip": false,
358
+ "normalized": false,
359
+ "rstrip": false,
360
+ "single_word": false,
361
+ "special": true
362
+ },
363
+ "128045": {
364
+ "content": "<|reserved_special_token_37|>",
365
+ "lstrip": false,
366
+ "normalized": false,
367
+ "rstrip": false,
368
+ "single_word": false,
369
+ "special": true
370
+ },
371
+ "128046": {
372
+ "content": "<|reserved_special_token_38|>",
373
+ "lstrip": false,
374
+ "normalized": false,
375
+ "rstrip": false,
376
+ "single_word": false,
377
+ "special": true
378
+ },
379
+ "128047": {
380
+ "content": "<|reserved_special_token_39|>",
381
+ "lstrip": false,
382
+ "normalized": false,
383
+ "rstrip": false,
384
+ "single_word": false,
385
+ "special": true
386
+ },
387
+ "128048": {
388
+ "content": "<|reserved_special_token_40|>",
389
+ "lstrip": false,
390
+ "normalized": false,
391
+ "rstrip": false,
392
+ "single_word": false,
393
+ "special": true
394
+ },
395
+ "128049": {
396
+ "content": "<|reserved_special_token_41|>",
397
+ "lstrip": false,
398
+ "normalized": false,
399
+ "rstrip": false,
400
+ "single_word": false,
401
+ "special": true
402
+ },
403
+ "128050": {
404
+ "content": "<|reserved_special_token_42|>",
405
+ "lstrip": false,
406
+ "normalized": false,
407
+ "rstrip": false,
408
+ "single_word": false,
409
+ "special": true
410
+ },
411
+ "128051": {
412
+ "content": "<|reserved_special_token_43|>",
413
+ "lstrip": false,
414
+ "normalized": false,
415
+ "rstrip": false,
416
+ "single_word": false,
417
+ "special": true
418
+ },
419
+ "128052": {
420
+ "content": "<|reserved_special_token_44|>",
421
+ "lstrip": false,
422
+ "normalized": false,
423
+ "rstrip": false,
424
+ "single_word": false,
425
+ "special": true
426
+ },
427
+ "128053": {
428
+ "content": "<|reserved_special_token_45|>",
429
+ "lstrip": false,
430
+ "normalized": false,
431
+ "rstrip": false,
432
+ "single_word": false,
433
+ "special": true
434
+ },
435
+ "128054": {
436
+ "content": "<|reserved_special_token_46|>",
437
+ "lstrip": false,
438
+ "normalized": false,
439
+ "rstrip": false,
440
+ "single_word": false,
441
+ "special": true
442
+ },
443
+ "128055": {
444
+ "content": "<|reserved_special_token_47|>",
445
+ "lstrip": false,
446
+ "normalized": false,
447
+ "rstrip": false,
448
+ "single_word": false,
449
+ "special": true
450
+ },
451
+ "128056": {
452
+ "content": "<|reserved_special_token_48|>",
453
+ "lstrip": false,
454
+ "normalized": false,
455
+ "rstrip": false,
456
+ "single_word": false,
457
+ "special": true
458
+ },
459
+ "128057": {
460
+ "content": "<|reserved_special_token_49|>",
461
+ "lstrip": false,
462
+ "normalized": false,
463
+ "rstrip": false,
464
+ "single_word": false,
465
+ "special": true
466
+ },
467
+ "128058": {
468
+ "content": "<|reserved_special_token_50|>",
469
+ "lstrip": false,
470
+ "normalized": false,
471
+ "rstrip": false,
472
+ "single_word": false,
473
+ "special": true
474
+ },
475
+ "128059": {
476
+ "content": "<|reserved_special_token_51|>",
477
+ "lstrip": false,
478
+ "normalized": false,
479
+ "rstrip": false,
480
+ "single_word": false,
481
+ "special": true
482
+ },
483
+ "128060": {
484
+ "content": "<|reserved_special_token_52|>",
485
+ "lstrip": false,
486
+ "normalized": false,
487
+ "rstrip": false,
488
+ "single_word": false,
489
+ "special": true
490
+ },
491
+ "128061": {
492
+ "content": "<|reserved_special_token_53|>",
493
+ "lstrip": false,
494
+ "normalized": false,
495
+ "rstrip": false,
496
+ "single_word": false,
497
+ "special": true
498
+ },
499
+ "128062": {
500
+ "content": "<|reserved_special_token_54|>",
501
+ "lstrip": false,
502
+ "normalized": false,
503
+ "rstrip": false,
504
+ "single_word": false,
505
+ "special": true
506
+ },
507
+ "128063": {
508
+ "content": "<|reserved_special_token_55|>",
509
+ "lstrip": false,
510
+ "normalized": false,
511
+ "rstrip": false,
512
+ "single_word": false,
513
+ "special": true
514
+ },
515
+ "128064": {
516
+ "content": "<|reserved_special_token_56|>",
517
+ "lstrip": false,
518
+ "normalized": false,
519
+ "rstrip": false,
520
+ "single_word": false,
521
+ "special": true
522
+ },
523
+ "128065": {
524
+ "content": "<|reserved_special_token_57|>",
525
+ "lstrip": false,
526
+ "normalized": false,
527
+ "rstrip": false,
528
+ "single_word": false,
529
+ "special": true
530
+ },
531
+ "128066": {
532
+ "content": "<|reserved_special_token_58|>",
533
+ "lstrip": false,
534
+ "normalized": false,
535
+ "rstrip": false,
536
+ "single_word": false,
537
+ "special": true
538
+ },
539
+ "128067": {
540
+ "content": "<|reserved_special_token_59|>",
541
+ "lstrip": false,
542
+ "normalized": false,
543
+ "rstrip": false,
544
+ "single_word": false,
545
+ "special": true
546
+ },
547
+ "128068": {
548
+ "content": "<|reserved_special_token_60|>",
549
+ "lstrip": false,
550
+ "normalized": false,
551
+ "rstrip": false,
552
+ "single_word": false,
553
+ "special": true
554
+ },
555
+ "128069": {
556
+ "content": "<|reserved_special_token_61|>",
557
+ "lstrip": false,
558
+ "normalized": false,
559
+ "rstrip": false,
560
+ "single_word": false,
561
+ "special": true
562
+ },
563
+ "128070": {
564
+ "content": "<|reserved_special_token_62|>",
565
+ "lstrip": false,
566
+ "normalized": false,
567
+ "rstrip": false,
568
+ "single_word": false,
569
+ "special": true
570
+ },
571
+ "128071": {
572
+ "content": "<|reserved_special_token_63|>",
573
+ "lstrip": false,
574
+ "normalized": false,
575
+ "rstrip": false,
576
+ "single_word": false,
577
+ "special": true
578
+ },
579
+ "128072": {
580
+ "content": "<|reserved_special_token_64|>",
581
+ "lstrip": false,
582
+ "normalized": false,
583
+ "rstrip": false,
584
+ "single_word": false,
585
+ "special": true
586
+ },
587
+ "128073": {
588
+ "content": "<|reserved_special_token_65|>",
589
+ "lstrip": false,
590
+ "normalized": false,
591
+ "rstrip": false,
592
+ "single_word": false,
593
+ "special": true
594
+ },
595
+ "128074": {
596
+ "content": "<|reserved_special_token_66|>",
597
+ "lstrip": false,
598
+ "normalized": false,
599
+ "rstrip": false,
600
+ "single_word": false,
601
+ "special": true
602
+ },
603
+ "128075": {
604
+ "content": "<|reserved_special_token_67|>",
605
+ "lstrip": false,
606
+ "normalized": false,
607
+ "rstrip": false,
608
+ "single_word": false,
609
+ "special": true
610
+ },
611
+ "128076": {
612
+ "content": "<|reserved_special_token_68|>",
613
+ "lstrip": false,
614
+ "normalized": false,
615
+ "rstrip": false,
616
+ "single_word": false,
617
+ "special": true
618
+ },
619
+ "128077": {
620
+ "content": "<|reserved_special_token_69|>",
621
+ "lstrip": false,
622
+ "normalized": false,
623
+ "rstrip": false,
624
+ "single_word": false,
625
+ "special": true
626
+ },
627
+ "128078": {
628
+ "content": "<|reserved_special_token_70|>",
629
+ "lstrip": false,
630
+ "normalized": false,
631
+ "rstrip": false,
632
+ "single_word": false,
633
+ "special": true
634
+ },
635
+ "128079": {
636
+ "content": "<|reserved_special_token_71|>",
637
+ "lstrip": false,
638
+ "normalized": false,
639
+ "rstrip": false,
640
+ "single_word": false,
641
+ "special": true
642
+ },
643
+ "128080": {
644
+ "content": "<|reserved_special_token_72|>",
645
+ "lstrip": false,
646
+ "normalized": false,
647
+ "rstrip": false,
648
+ "single_word": false,
649
+ "special": true
650
+ },
651
+ "128081": {
652
+ "content": "<|reserved_special_token_73|>",
653
+ "lstrip": false,
654
+ "normalized": false,
655
+ "rstrip": false,
656
+ "single_word": false,
657
+ "special": true
658
+ },
659
+ "128082": {
660
+ "content": "<|reserved_special_token_74|>",
661
+ "lstrip": false,
662
+ "normalized": false,
663
+ "rstrip": false,
664
+ "single_word": false,
665
+ "special": true
666
+ },
667
+ "128083": {
668
+ "content": "<|reserved_special_token_75|>",
669
+ "lstrip": false,
670
+ "normalized": false,
671
+ "rstrip": false,
672
+ "single_word": false,
673
+ "special": true
674
+ },
675
+ "128084": {
676
+ "content": "<|reserved_special_token_76|>",
677
+ "lstrip": false,
678
+ "normalized": false,
679
+ "rstrip": false,
680
+ "single_word": false,
681
+ "special": true
682
+ },
683
+ "128085": {
684
+ "content": "<|reserved_special_token_77|>",
685
+ "lstrip": false,
686
+ "normalized": false,
687
+ "rstrip": false,
688
+ "single_word": false,
689
+ "special": true
690
+ },
691
+ "128086": {
692
+ "content": "<|reserved_special_token_78|>",
693
+ "lstrip": false,
694
+ "normalized": false,
695
+ "rstrip": false,
696
+ "single_word": false,
697
+ "special": true
698
+ },
699
+ "128087": {
700
+ "content": "<|reserved_special_token_79|>",
701
+ "lstrip": false,
702
+ "normalized": false,
703
+ "rstrip": false,
704
+ "single_word": false,
705
+ "special": true
706
+ },
707
+ "128088": {
708
+ "content": "<|reserved_special_token_80|>",
709
+ "lstrip": false,
710
+ "normalized": false,
711
+ "rstrip": false,
712
+ "single_word": false,
713
+ "special": true
714
+ },
715
+ "128089": {
716
+ "content": "<|reserved_special_token_81|>",
717
+ "lstrip": false,
718
+ "normalized": false,
719
+ "rstrip": false,
720
+ "single_word": false,
721
+ "special": true
722
+ },
723
+ "128090": {
724
+ "content": "<|reserved_special_token_82|>",
725
+ "lstrip": false,
726
+ "normalized": false,
727
+ "rstrip": false,
728
+ "single_word": false,
729
+ "special": true
730
+ },
731
+ "128091": {
732
+ "content": "<|reserved_special_token_83|>",
733
+ "lstrip": false,
734
+ "normalized": false,
735
+ "rstrip": false,
736
+ "single_word": false,
737
+ "special": true
738
+ },
739
+ "128092": {
740
+ "content": "<|reserved_special_token_84|>",
741
+ "lstrip": false,
742
+ "normalized": false,
743
+ "rstrip": false,
744
+ "single_word": false,
745
+ "special": true
746
+ },
747
+ "128093": {
748
+ "content": "<|reserved_special_token_85|>",
749
+ "lstrip": false,
750
+ "normalized": false,
751
+ "rstrip": false,
752
+ "single_word": false,
753
+ "special": true
754
+ },
755
+ "128094": {
756
+ "content": "<|reserved_special_token_86|>",
757
+ "lstrip": false,
758
+ "normalized": false,
759
+ "rstrip": false,
760
+ "single_word": false,
761
+ "special": true
762
+ },
763
+ "128095": {
764
+ "content": "<|reserved_special_token_87|>",
765
+ "lstrip": false,
766
+ "normalized": false,
767
+ "rstrip": false,
768
+ "single_word": false,
769
+ "special": true
770
+ },
771
+ "128096": {
772
+ "content": "<|reserved_special_token_88|>",
773
+ "lstrip": false,
774
+ "normalized": false,
775
+ "rstrip": false,
776
+ "single_word": false,
777
+ "special": true
778
+ },
779
+ "128097": {
780
+ "content": "<|reserved_special_token_89|>",
781
+ "lstrip": false,
782
+ "normalized": false,
783
+ "rstrip": false,
784
+ "single_word": false,
785
+ "special": true
786
+ },
787
+ "128098": {
788
+ "content": "<|reserved_special_token_90|>",
789
+ "lstrip": false,
790
+ "normalized": false,
791
+ "rstrip": false,
792
+ "single_word": false,
793
+ "special": true
794
+ },
795
+ "128099": {
796
+ "content": "<|reserved_special_token_91|>",
797
+ "lstrip": false,
798
+ "normalized": false,
799
+ "rstrip": false,
800
+ "single_word": false,
801
+ "special": true
802
+ },
803
+ "128100": {
804
+ "content": "<|reserved_special_token_92|>",
805
+ "lstrip": false,
806
+ "normalized": false,
807
+ "rstrip": false,
808
+ "single_word": false,
809
+ "special": true
810
+ },
811
+ "128101": {
812
+ "content": "<|reserved_special_token_93|>",
813
+ "lstrip": false,
814
+ "normalized": false,
815
+ "rstrip": false,
816
+ "single_word": false,
817
+ "special": true
818
+ },
819
+ "128102": {
820
+ "content": "<|reserved_special_token_94|>",
821
+ "lstrip": false,
822
+ "normalized": false,
823
+ "rstrip": false,
824
+ "single_word": false,
825
+ "special": true
826
+ },
827
+ "128103": {
828
+ "content": "<|reserved_special_token_95|>",
829
+ "lstrip": false,
830
+ "normalized": false,
831
+ "rstrip": false,
832
+ "single_word": false,
833
+ "special": true
834
+ },
835
+ "128104": {
836
+ "content": "<|reserved_special_token_96|>",
837
+ "lstrip": false,
838
+ "normalized": false,
839
+ "rstrip": false,
840
+ "single_word": false,
841
+ "special": true
842
+ },
843
+ "128105": {
844
+ "content": "<|reserved_special_token_97|>",
845
+ "lstrip": false,
846
+ "normalized": false,
847
+ "rstrip": false,
848
+ "single_word": false,
849
+ "special": true
850
+ },
851
+ "128106": {
852
+ "content": "<|reserved_special_token_98|>",
853
+ "lstrip": false,
854
+ "normalized": false,
855
+ "rstrip": false,
856
+ "single_word": false,
857
+ "special": true
858
+ },
859
+ "128107": {
860
+ "content": "<|reserved_special_token_99|>",
861
+ "lstrip": false,
862
+ "normalized": false,
863
+ "rstrip": false,
864
+ "single_word": false,
865
+ "special": true
866
+ },
867
+ "128108": {
868
+ "content": "<|reserved_special_token_100|>",
869
+ "lstrip": false,
870
+ "normalized": false,
871
+ "rstrip": false,
872
+ "single_word": false,
873
+ "special": true
874
+ },
875
+ "128109": {
876
+ "content": "<|reserved_special_token_101|>",
877
+ "lstrip": false,
878
+ "normalized": false,
879
+ "rstrip": false,
880
+ "single_word": false,
881
+ "special": true
882
+ },
883
+ "128110": {
884
+ "content": "<|reserved_special_token_102|>",
885
+ "lstrip": false,
886
+ "normalized": false,
887
+ "rstrip": false,
888
+ "single_word": false,
889
+ "special": true
890
+ },
891
+ "128111": {
892
+ "content": "<|reserved_special_token_103|>",
893
+ "lstrip": false,
894
+ "normalized": false,
895
+ "rstrip": false,
896
+ "single_word": false,
897
+ "special": true
898
+ },
899
+ "128112": {
900
+ "content": "<|reserved_special_token_104|>",
901
+ "lstrip": false,
902
+ "normalized": false,
903
+ "rstrip": false,
904
+ "single_word": false,
905
+ "special": true
906
+ },
907
+ "128113": {
908
+ "content": "<|reserved_special_token_105|>",
909
+ "lstrip": false,
910
+ "normalized": false,
911
+ "rstrip": false,
912
+ "single_word": false,
913
+ "special": true
914
+ },
915
+ "128114": {
916
+ "content": "<|reserved_special_token_106|>",
917
+ "lstrip": false,
918
+ "normalized": false,
919
+ "rstrip": false,
920
+ "single_word": false,
921
+ "special": true
922
+ },
923
+ "128115": {
924
+ "content": "<|reserved_special_token_107|>",
925
+ "lstrip": false,
926
+ "normalized": false,
927
+ "rstrip": false,
928
+ "single_word": false,
929
+ "special": true
930
+ },
931
+ "128116": {
932
+ "content": "<|reserved_special_token_108|>",
933
+ "lstrip": false,
934
+ "normalized": false,
935
+ "rstrip": false,
936
+ "single_word": false,
937
+ "special": true
938
+ },
939
+ "128117": {
940
+ "content": "<|reserved_special_token_109|>",
941
+ "lstrip": false,
942
+ "normalized": false,
943
+ "rstrip": false,
944
+ "single_word": false,
945
+ "special": true
946
+ },
947
+ "128118": {
948
+ "content": "<|reserved_special_token_110|>",
949
+ "lstrip": false,
950
+ "normalized": false,
951
+ "rstrip": false,
952
+ "single_word": false,
953
+ "special": true
954
+ },
955
+ "128119": {
956
+ "content": "<|reserved_special_token_111|>",
957
+ "lstrip": false,
958
+ "normalized": false,
959
+ "rstrip": false,
960
+ "single_word": false,
961
+ "special": true
962
+ },
963
+ "128120": {
964
+ "content": "<|reserved_special_token_112|>",
965
+ "lstrip": false,
966
+ "normalized": false,
967
+ "rstrip": false,
968
+ "single_word": false,
969
+ "special": true
970
+ },
971
+ "128121": {
972
+ "content": "<|reserved_special_token_113|>",
973
+ "lstrip": false,
974
+ "normalized": false,
975
+ "rstrip": false,
976
+ "single_word": false,
977
+ "special": true
978
+ },
979
+ "128122": {
980
+ "content": "<|reserved_special_token_114|>",
981
+ "lstrip": false,
982
+ "normalized": false,
983
+ "rstrip": false,
984
+ "single_word": false,
985
+ "special": true
986
+ },
987
+ "128123": {
988
+ "content": "<|reserved_special_token_115|>",
989
+ "lstrip": false,
990
+ "normalized": false,
991
+ "rstrip": false,
992
+ "single_word": false,
993
+ "special": true
994
+ },
995
+ "128124": {
996
+ "content": "<|reserved_special_token_116|>",
997
+ "lstrip": false,
998
+ "normalized": false,
999
+ "rstrip": false,
1000
+ "single_word": false,
1001
+ "special": true
1002
+ },
1003
+ "128125": {
1004
+ "content": "<|reserved_special_token_117|>",
1005
+ "lstrip": false,
1006
+ "normalized": false,
1007
+ "rstrip": false,
1008
+ "single_word": false,
1009
+ "special": true
1010
+ },
1011
+ "128126": {
1012
+ "content": "<|reserved_special_token_118|>",
1013
+ "lstrip": false,
1014
+ "normalized": false,
1015
+ "rstrip": false,
1016
+ "single_word": false,
1017
+ "special": true
1018
+ },
1019
+ "128127": {
1020
+ "content": "<|reserved_special_token_119|>",
1021
+ "lstrip": false,
1022
+ "normalized": false,
1023
+ "rstrip": false,
1024
+ "single_word": false,
1025
+ "special": true
1026
+ },
1027
+ "128128": {
1028
+ "content": "<|reserved_special_token_120|>",
1029
+ "lstrip": false,
1030
+ "normalized": false,
1031
+ "rstrip": false,
1032
+ "single_word": false,
1033
+ "special": true
1034
+ },
1035
+ "128129": {
1036
+ "content": "<|reserved_special_token_121|>",
1037
+ "lstrip": false,
1038
+ "normalized": false,
1039
+ "rstrip": false,
1040
+ "single_word": false,
1041
+ "special": true
1042
+ },
1043
+ "128130": {
1044
+ "content": "<|reserved_special_token_122|>",
1045
+ "lstrip": false,
1046
+ "normalized": false,
1047
+ "rstrip": false,
1048
+ "single_word": false,
1049
+ "special": true
1050
+ },
1051
+ "128131": {
1052
+ "content": "<|reserved_special_token_123|>",
1053
+ "lstrip": false,
1054
+ "normalized": false,
1055
+ "rstrip": false,
1056
+ "single_word": false,
1057
+ "special": true
1058
+ },
1059
+ "128132": {
1060
+ "content": "<|reserved_special_token_124|>",
1061
+ "lstrip": false,
1062
+ "normalized": false,
1063
+ "rstrip": false,
1064
+ "single_word": false,
1065
+ "special": true
1066
+ },
1067
+ "128133": {
1068
+ "content": "<|reserved_special_token_125|>",
1069
+ "lstrip": false,
1070
+ "normalized": false,
1071
+ "rstrip": false,
1072
+ "single_word": false,
1073
+ "special": true
1074
+ },
1075
+ "128134": {
1076
+ "content": "<|reserved_special_token_126|>",
1077
+ "lstrip": false,
1078
+ "normalized": false,
1079
+ "rstrip": false,
1080
+ "single_word": false,
1081
+ "special": true
1082
+ },
1083
+ "128135": {
1084
+ "content": "<|reserved_special_token_127|>",
1085
+ "lstrip": false,
1086
+ "normalized": false,
1087
+ "rstrip": false,
1088
+ "single_word": false,
1089
+ "special": true
1090
+ },
1091
+ "128136": {
1092
+ "content": "<|reserved_special_token_128|>",
1093
+ "lstrip": false,
1094
+ "normalized": false,
1095
+ "rstrip": false,
1096
+ "single_word": false,
1097
+ "special": true
1098
+ },
1099
+ "128137": {
1100
+ "content": "<|reserved_special_token_129|>",
1101
+ "lstrip": false,
1102
+ "normalized": false,
1103
+ "rstrip": false,
1104
+ "single_word": false,
1105
+ "special": true
1106
+ },
1107
+ "128138": {
1108
+ "content": "<|reserved_special_token_130|>",
1109
+ "lstrip": false,
1110
+ "normalized": false,
1111
+ "rstrip": false,
1112
+ "single_word": false,
1113
+ "special": true
1114
+ },
1115
+ "128139": {
1116
+ "content": "<|reserved_special_token_131|>",
1117
+ "lstrip": false,
1118
+ "normalized": false,
1119
+ "rstrip": false,
1120
+ "single_word": false,
1121
+ "special": true
1122
+ },
1123
+ "128140": {
1124
+ "content": "<|reserved_special_token_132|>",
1125
+ "lstrip": false,
1126
+ "normalized": false,
1127
+ "rstrip": false,
1128
+ "single_word": false,
1129
+ "special": true
1130
+ },
1131
+ "128141": {
1132
+ "content": "<|reserved_special_token_133|>",
1133
+ "lstrip": false,
1134
+ "normalized": false,
1135
+ "rstrip": false,
1136
+ "single_word": false,
1137
+ "special": true
1138
+ },
1139
+ "128142": {
1140
+ "content": "<|reserved_special_token_134|>",
1141
+ "lstrip": false,
1142
+ "normalized": false,
1143
+ "rstrip": false,
1144
+ "single_word": false,
1145
+ "special": true
1146
+ },
1147
+ "128143": {
1148
+ "content": "<|reserved_special_token_135|>",
1149
+ "lstrip": false,
1150
+ "normalized": false,
1151
+ "rstrip": false,
1152
+ "single_word": false,
1153
+ "special": true
1154
+ },
1155
+ "128144": {
1156
+ "content": "<|reserved_special_token_136|>",
1157
+ "lstrip": false,
1158
+ "normalized": false,
1159
+ "rstrip": false,
1160
+ "single_word": false,
1161
+ "special": true
1162
+ },
1163
+ "128145": {
1164
+ "content": "<|reserved_special_token_137|>",
1165
+ "lstrip": false,
1166
+ "normalized": false,
1167
+ "rstrip": false,
1168
+ "single_word": false,
1169
+ "special": true
1170
+ },
1171
+ "128146": {
1172
+ "content": "<|reserved_special_token_138|>",
1173
+ "lstrip": false,
1174
+ "normalized": false,
1175
+ "rstrip": false,
1176
+ "single_word": false,
1177
+ "special": true
1178
+ },
1179
+ "128147": {
1180
+ "content": "<|reserved_special_token_139|>",
1181
+ "lstrip": false,
1182
+ "normalized": false,
1183
+ "rstrip": false,
1184
+ "single_word": false,
1185
+ "special": true
1186
+ },
1187
+ "128148": {
1188
+ "content": "<|reserved_special_token_140|>",
1189
+ "lstrip": false,
1190
+ "normalized": false,
1191
+ "rstrip": false,
1192
+ "single_word": false,
1193
+ "special": true
1194
+ },
1195
+ "128149": {
1196
+ "content": "<|reserved_special_token_141|>",
1197
+ "lstrip": false,
1198
+ "normalized": false,
1199
+ "rstrip": false,
1200
+ "single_word": false,
1201
+ "special": true
1202
+ },
1203
+ "128150": {
1204
+ "content": "<|reserved_special_token_142|>",
1205
+ "lstrip": false,
1206
+ "normalized": false,
1207
+ "rstrip": false,
1208
+ "single_word": false,
1209
+ "special": true
1210
+ },
1211
+ "128151": {
1212
+ "content": "<|reserved_special_token_143|>",
1213
+ "lstrip": false,
1214
+ "normalized": false,
1215
+ "rstrip": false,
1216
+ "single_word": false,
1217
+ "special": true
1218
+ },
1219
+ "128152": {
1220
+ "content": "<|reserved_special_token_144|>",
1221
+ "lstrip": false,
1222
+ "normalized": false,
1223
+ "rstrip": false,
1224
+ "single_word": false,
1225
+ "special": true
1226
+ },
1227
+ "128153": {
1228
+ "content": "<|reserved_special_token_145|>",
1229
+ "lstrip": false,
1230
+ "normalized": false,
1231
+ "rstrip": false,
1232
+ "single_word": false,
1233
+ "special": true
1234
+ },
1235
+ "128154": {
1236
+ "content": "<|reserved_special_token_146|>",
1237
+ "lstrip": false,
1238
+ "normalized": false,
1239
+ "rstrip": false,
1240
+ "single_word": false,
1241
+ "special": true
1242
+ },
1243
+ "128155": {
1244
+ "content": "<|reserved_special_token_147|>",
1245
+ "lstrip": false,
1246
+ "normalized": false,
1247
+ "rstrip": false,
1248
+ "single_word": false,
1249
+ "special": true
1250
+ },
1251
+ "128156": {
1252
+ "content": "<|reserved_special_token_148|>",
1253
+ "lstrip": false,
1254
+ "normalized": false,
1255
+ "rstrip": false,
1256
+ "single_word": false,
1257
+ "special": true
1258
+ },
1259
+ "128157": {
1260
+ "content": "<|reserved_special_token_149|>",
1261
+ "lstrip": false,
1262
+ "normalized": false,
1263
+ "rstrip": false,
1264
+ "single_word": false,
1265
+ "special": true
1266
+ },
1267
+ "128158": {
1268
+ "content": "<|reserved_special_token_150|>",
1269
+ "lstrip": false,
1270
+ "normalized": false,
1271
+ "rstrip": false,
1272
+ "single_word": false,
1273
+ "special": true
1274
+ },
1275
+ "128159": {
1276
+ "content": "<|reserved_special_token_151|>",
1277
+ "lstrip": false,
1278
+ "normalized": false,
1279
+ "rstrip": false,
1280
+ "single_word": false,
1281
+ "special": true
1282
+ },
1283
+ "128160": {
1284
+ "content": "<|reserved_special_token_152|>",
1285
+ "lstrip": false,
1286
+ "normalized": false,
1287
+ "rstrip": false,
1288
+ "single_word": false,
1289
+ "special": true
1290
+ },
1291
+ "128161": {
1292
+ "content": "<|reserved_special_token_153|>",
1293
+ "lstrip": false,
1294
+ "normalized": false,
1295
+ "rstrip": false,
1296
+ "single_word": false,
1297
+ "special": true
1298
+ },
1299
+ "128162": {
1300
+ "content": "<|reserved_special_token_154|>",
1301
+ "lstrip": false,
1302
+ "normalized": false,
1303
+ "rstrip": false,
1304
+ "single_word": false,
1305
+ "special": true
1306
+ },
1307
+ "128163": {
1308
+ "content": "<|reserved_special_token_155|>",
1309
+ "lstrip": false,
1310
+ "normalized": false,
1311
+ "rstrip": false,
1312
+ "single_word": false,
1313
+ "special": true
1314
+ },
1315
+ "128164": {
1316
+ "content": "<|reserved_special_token_156|>",
1317
+ "lstrip": false,
1318
+ "normalized": false,
1319
+ "rstrip": false,
1320
+ "single_word": false,
1321
+ "special": true
1322
+ },
1323
+ "128165": {
1324
+ "content": "<|reserved_special_token_157|>",
1325
+ "lstrip": false,
1326
+ "normalized": false,
1327
+ "rstrip": false,
1328
+ "single_word": false,
1329
+ "special": true
1330
+ },
1331
+ "128166": {
1332
+ "content": "<|reserved_special_token_158|>",
1333
+ "lstrip": false,
1334
+ "normalized": false,
1335
+ "rstrip": false,
1336
+ "single_word": false,
1337
+ "special": true
1338
+ },
1339
+ "128167": {
1340
+ "content": "<|reserved_special_token_159|>",
1341
+ "lstrip": false,
1342
+ "normalized": false,
1343
+ "rstrip": false,
1344
+ "single_word": false,
1345
+ "special": true
1346
+ },
1347
+ "128168": {
1348
+ "content": "<|reserved_special_token_160|>",
1349
+ "lstrip": false,
1350
+ "normalized": false,
1351
+ "rstrip": false,
1352
+ "single_word": false,
1353
+ "special": true
1354
+ },
1355
+ "128169": {
1356
+ "content": "<|reserved_special_token_161|>",
1357
+ "lstrip": false,
1358
+ "normalized": false,
1359
+ "rstrip": false,
1360
+ "single_word": false,
1361
+ "special": true
1362
+ },
1363
+ "128170": {
1364
+ "content": "<|reserved_special_token_162|>",
1365
+ "lstrip": false,
1366
+ "normalized": false,
1367
+ "rstrip": false,
1368
+ "single_word": false,
1369
+ "special": true
1370
+ },
1371
+ "128171": {
1372
+ "content": "<|reserved_special_token_163|>",
1373
+ "lstrip": false,
1374
+ "normalized": false,
1375
+ "rstrip": false,
1376
+ "single_word": false,
1377
+ "special": true
1378
+ },
1379
+ "128172": {
1380
+ "content": "<|reserved_special_token_164|>",
1381
+ "lstrip": false,
1382
+ "normalized": false,
1383
+ "rstrip": false,
1384
+ "single_word": false,
1385
+ "special": true
1386
+ },
1387
+ "128173": {
1388
+ "content": "<|reserved_special_token_165|>",
1389
+ "lstrip": false,
1390
+ "normalized": false,
1391
+ "rstrip": false,
1392
+ "single_word": false,
1393
+ "special": true
1394
+ },
1395
+ "128174": {
1396
+ "content": "<|reserved_special_token_166|>",
1397
+ "lstrip": false,
1398
+ "normalized": false,
1399
+ "rstrip": false,
1400
+ "single_word": false,
1401
+ "special": true
1402
+ },
1403
+ "128175": {
1404
+ "content": "<|reserved_special_token_167|>",
1405
+ "lstrip": false,
1406
+ "normalized": false,
1407
+ "rstrip": false,
1408
+ "single_word": false,
1409
+ "special": true
1410
+ },
1411
+ "128176": {
1412
+ "content": "<|reserved_special_token_168|>",
1413
+ "lstrip": false,
1414
+ "normalized": false,
1415
+ "rstrip": false,
1416
+ "single_word": false,
1417
+ "special": true
1418
+ },
1419
+ "128177": {
1420
+ "content": "<|reserved_special_token_169|>",
1421
+ "lstrip": false,
1422
+ "normalized": false,
1423
+ "rstrip": false,
1424
+ "single_word": false,
1425
+ "special": true
1426
+ },
1427
+ "128178": {
1428
+ "content": "<|reserved_special_token_170|>",
1429
+ "lstrip": false,
1430
+ "normalized": false,
1431
+ "rstrip": false,
1432
+ "single_word": false,
1433
+ "special": true
1434
+ },
1435
+ "128179": {
1436
+ "content": "<|reserved_special_token_171|>",
1437
+ "lstrip": false,
1438
+ "normalized": false,
1439
+ "rstrip": false,
1440
+ "single_word": false,
1441
+ "special": true
1442
+ },
1443
+ "128180": {
1444
+ "content": "<|reserved_special_token_172|>",
1445
+ "lstrip": false,
1446
+ "normalized": false,
1447
+ "rstrip": false,
1448
+ "single_word": false,
1449
+ "special": true
1450
+ },
1451
+ "128181": {
1452
+ "content": "<|reserved_special_token_173|>",
1453
+ "lstrip": false,
1454
+ "normalized": false,
1455
+ "rstrip": false,
1456
+ "single_word": false,
1457
+ "special": true
1458
+ },
1459
+ "128182": {
1460
+ "content": "<|reserved_special_token_174|>",
1461
+ "lstrip": false,
1462
+ "normalized": false,
1463
+ "rstrip": false,
1464
+ "single_word": false,
1465
+ "special": true
1466
+ },
1467
+ "128183": {
1468
+ "content": "<|reserved_special_token_175|>",
1469
+ "lstrip": false,
1470
+ "normalized": false,
1471
+ "rstrip": false,
1472
+ "single_word": false,
1473
+ "special": true
1474
+ },
1475
+ "128184": {
1476
+ "content": "<|reserved_special_token_176|>",
1477
+ "lstrip": false,
1478
+ "normalized": false,
1479
+ "rstrip": false,
1480
+ "single_word": false,
1481
+ "special": true
1482
+ },
1483
+ "128185": {
1484
+ "content": "<|reserved_special_token_177|>",
1485
+ "lstrip": false,
1486
+ "normalized": false,
1487
+ "rstrip": false,
1488
+ "single_word": false,
1489
+ "special": true
1490
+ },
1491
+ "128186": {
1492
+ "content": "<|reserved_special_token_178|>",
1493
+ "lstrip": false,
1494
+ "normalized": false,
1495
+ "rstrip": false,
1496
+ "single_word": false,
1497
+ "special": true
1498
+ },
1499
+ "128187": {
1500
+ "content": "<|reserved_special_token_179|>",
1501
+ "lstrip": false,
1502
+ "normalized": false,
1503
+ "rstrip": false,
1504
+ "single_word": false,
1505
+ "special": true
1506
+ },
1507
+ "128188": {
1508
+ "content": "<|reserved_special_token_180|>",
1509
+ "lstrip": false,
1510
+ "normalized": false,
1511
+ "rstrip": false,
1512
+ "single_word": false,
1513
+ "special": true
1514
+ },
1515
+ "128189": {
1516
+ "content": "<|reserved_special_token_181|>",
1517
+ "lstrip": false,
1518
+ "normalized": false,
1519
+ "rstrip": false,
1520
+ "single_word": false,
1521
+ "special": true
1522
+ },
1523
+ "128190": {
1524
+ "content": "<|reserved_special_token_182|>",
1525
+ "lstrip": false,
1526
+ "normalized": false,
1527
+ "rstrip": false,
1528
+ "single_word": false,
1529
+ "special": true
1530
+ },
1531
+ "128191": {
1532
+ "content": "<|reserved_special_token_183|>",
1533
+ "lstrip": false,
1534
+ "normalized": false,
1535
+ "rstrip": false,
1536
+ "single_word": false,
1537
+ "special": true
1538
+ },
1539
+ "128192": {
1540
+ "content": "<|reserved_special_token_184|>",
1541
+ "lstrip": false,
1542
+ "normalized": false,
1543
+ "rstrip": false,
1544
+ "single_word": false,
1545
+ "special": true
1546
+ },
1547
+ "128193": {
1548
+ "content": "<|reserved_special_token_185|>",
1549
+ "lstrip": false,
1550
+ "normalized": false,
1551
+ "rstrip": false,
1552
+ "single_word": false,
1553
+ "special": true
1554
+ },
1555
+ "128194": {
1556
+ "content": "<|reserved_special_token_186|>",
1557
+ "lstrip": false,
1558
+ "normalized": false,
1559
+ "rstrip": false,
1560
+ "single_word": false,
1561
+ "special": true
1562
+ },
1563
+ "128195": {
1564
+ "content": "<|reserved_special_token_187|>",
1565
+ "lstrip": false,
1566
+ "normalized": false,
1567
+ "rstrip": false,
1568
+ "single_word": false,
1569
+ "special": true
1570
+ },
1571
+ "128196": {
1572
+ "content": "<|reserved_special_token_188|>",
1573
+ "lstrip": false,
1574
+ "normalized": false,
1575
+ "rstrip": false,
1576
+ "single_word": false,
1577
+ "special": true
1578
+ },
1579
+ "128197": {
1580
+ "content": "<|reserved_special_token_189|>",
1581
+ "lstrip": false,
1582
+ "normalized": false,
1583
+ "rstrip": false,
1584
+ "single_word": false,
1585
+ "special": true
1586
+ },
1587
+ "128198": {
1588
+ "content": "<|reserved_special_token_190|>",
1589
+ "lstrip": false,
1590
+ "normalized": false,
1591
+ "rstrip": false,
1592
+ "single_word": false,
1593
+ "special": true
1594
+ },
1595
+ "128199": {
1596
+ "content": "<|reserved_special_token_191|>",
1597
+ "lstrip": false,
1598
+ "normalized": false,
1599
+ "rstrip": false,
1600
+ "single_word": false,
1601
+ "special": true
1602
+ },
1603
+ "128200": {
1604
+ "content": "<|reserved_special_token_192|>",
1605
+ "lstrip": false,
1606
+ "normalized": false,
1607
+ "rstrip": false,
1608
+ "single_word": false,
1609
+ "special": true
1610
+ },
1611
+ "128201": {
1612
+ "content": "<|reserved_special_token_193|>",
1613
+ "lstrip": false,
1614
+ "normalized": false,
1615
+ "rstrip": false,
1616
+ "single_word": false,
1617
+ "special": true
1618
+ },
1619
+ "128202": {
1620
+ "content": "<|reserved_special_token_194|>",
1621
+ "lstrip": false,
1622
+ "normalized": false,
1623
+ "rstrip": false,
1624
+ "single_word": false,
1625
+ "special": true
1626
+ },
1627
+ "128203": {
1628
+ "content": "<|reserved_special_token_195|>",
1629
+ "lstrip": false,
1630
+ "normalized": false,
1631
+ "rstrip": false,
1632
+ "single_word": false,
1633
+ "special": true
1634
+ },
1635
+ "128204": {
1636
+ "content": "<|reserved_special_token_196|>",
1637
+ "lstrip": false,
1638
+ "normalized": false,
1639
+ "rstrip": false,
1640
+ "single_word": false,
1641
+ "special": true
1642
+ },
1643
+ "128205": {
1644
+ "content": "<|reserved_special_token_197|>",
1645
+ "lstrip": false,
1646
+ "normalized": false,
1647
+ "rstrip": false,
1648
+ "single_word": false,
1649
+ "special": true
1650
+ },
1651
+ "128206": {
1652
+ "content": "<|reserved_special_token_198|>",
1653
+ "lstrip": false,
1654
+ "normalized": false,
1655
+ "rstrip": false,
1656
+ "single_word": false,
1657
+ "special": true
1658
+ },
1659
+ "128207": {
1660
+ "content": "<|reserved_special_token_199|>",
1661
+ "lstrip": false,
1662
+ "normalized": false,
1663
+ "rstrip": false,
1664
+ "single_word": false,
1665
+ "special": true
1666
+ },
1667
+ "128208": {
1668
+ "content": "<|reserved_special_token_200|>",
1669
+ "lstrip": false,
1670
+ "normalized": false,
1671
+ "rstrip": false,
1672
+ "single_word": false,
1673
+ "special": true
1674
+ },
1675
+ "128209": {
1676
+ "content": "<|reserved_special_token_201|>",
1677
+ "lstrip": false,
1678
+ "normalized": false,
1679
+ "rstrip": false,
1680
+ "single_word": false,
1681
+ "special": true
1682
+ },
1683
+ "128210": {
1684
+ "content": "<|reserved_special_token_202|>",
1685
+ "lstrip": false,
1686
+ "normalized": false,
1687
+ "rstrip": false,
1688
+ "single_word": false,
1689
+ "special": true
1690
+ },
1691
+ "128211": {
1692
+ "content": "<|reserved_special_token_203|>",
1693
+ "lstrip": false,
1694
+ "normalized": false,
1695
+ "rstrip": false,
1696
+ "single_word": false,
1697
+ "special": true
1698
+ },
1699
+ "128212": {
1700
+ "content": "<|reserved_special_token_204|>",
1701
+ "lstrip": false,
1702
+ "normalized": false,
1703
+ "rstrip": false,
1704
+ "single_word": false,
1705
+ "special": true
1706
+ },
1707
+ "128213": {
1708
+ "content": "<|reserved_special_token_205|>",
1709
+ "lstrip": false,
1710
+ "normalized": false,
1711
+ "rstrip": false,
1712
+ "single_word": false,
1713
+ "special": true
1714
+ },
1715
+ "128214": {
1716
+ "content": "<|reserved_special_token_206|>",
1717
+ "lstrip": false,
1718
+ "normalized": false,
1719
+ "rstrip": false,
1720
+ "single_word": false,
1721
+ "special": true
1722
+ },
1723
+ "128215": {
1724
+ "content": "<|reserved_special_token_207|>",
1725
+ "lstrip": false,
1726
+ "normalized": false,
1727
+ "rstrip": false,
1728
+ "single_word": false,
1729
+ "special": true
1730
+ },
1731
+ "128216": {
1732
+ "content": "<|reserved_special_token_208|>",
1733
+ "lstrip": false,
1734
+ "normalized": false,
1735
+ "rstrip": false,
1736
+ "single_word": false,
1737
+ "special": true
1738
+ },
1739
+ "128217": {
1740
+ "content": "<|reserved_special_token_209|>",
1741
+ "lstrip": false,
1742
+ "normalized": false,
1743
+ "rstrip": false,
1744
+ "single_word": false,
1745
+ "special": true
1746
+ },
1747
+ "128218": {
1748
+ "content": "<|reserved_special_token_210|>",
1749
+ "lstrip": false,
1750
+ "normalized": false,
1751
+ "rstrip": false,
1752
+ "single_word": false,
1753
+ "special": true
1754
+ },
1755
+ "128219": {
1756
+ "content": "<|reserved_special_token_211|>",
1757
+ "lstrip": false,
1758
+ "normalized": false,
1759
+ "rstrip": false,
1760
+ "single_word": false,
1761
+ "special": true
1762
+ },
1763
+ "128220": {
1764
+ "content": "<|reserved_special_token_212|>",
1765
+ "lstrip": false,
1766
+ "normalized": false,
1767
+ "rstrip": false,
1768
+ "single_word": false,
1769
+ "special": true
1770
+ },
1771
+ "128221": {
1772
+ "content": "<|reserved_special_token_213|>",
1773
+ "lstrip": false,
1774
+ "normalized": false,
1775
+ "rstrip": false,
1776
+ "single_word": false,
1777
+ "special": true
1778
+ },
1779
+ "128222": {
1780
+ "content": "<|reserved_special_token_214|>",
1781
+ "lstrip": false,
1782
+ "normalized": false,
1783
+ "rstrip": false,
1784
+ "single_word": false,
1785
+ "special": true
1786
+ },
1787
+ "128223": {
1788
+ "content": "<|reserved_special_token_215|>",
1789
+ "lstrip": false,
1790
+ "normalized": false,
1791
+ "rstrip": false,
1792
+ "single_word": false,
1793
+ "special": true
1794
+ },
1795
+ "128224": {
1796
+ "content": "<|reserved_special_token_216|>",
1797
+ "lstrip": false,
1798
+ "normalized": false,
1799
+ "rstrip": false,
1800
+ "single_word": false,
1801
+ "special": true
1802
+ },
1803
+ "128225": {
1804
+ "content": "<|reserved_special_token_217|>",
1805
+ "lstrip": false,
1806
+ "normalized": false,
1807
+ "rstrip": false,
1808
+ "single_word": false,
1809
+ "special": true
1810
+ },
1811
+ "128226": {
1812
+ "content": "<|reserved_special_token_218|>",
1813
+ "lstrip": false,
1814
+ "normalized": false,
1815
+ "rstrip": false,
1816
+ "single_word": false,
1817
+ "special": true
1818
+ },
1819
+ "128227": {
1820
+ "content": "<|reserved_special_token_219|>",
1821
+ "lstrip": false,
1822
+ "normalized": false,
1823
+ "rstrip": false,
1824
+ "single_word": false,
1825
+ "special": true
1826
+ },
1827
+ "128228": {
1828
+ "content": "<|reserved_special_token_220|>",
1829
+ "lstrip": false,
1830
+ "normalized": false,
1831
+ "rstrip": false,
1832
+ "single_word": false,
1833
+ "special": true
1834
+ },
1835
+ "128229": {
1836
+ "content": "<|reserved_special_token_221|>",
1837
+ "lstrip": false,
1838
+ "normalized": false,
1839
+ "rstrip": false,
1840
+ "single_word": false,
1841
+ "special": true
1842
+ },
1843
+ "128230": {
1844
+ "content": "<|reserved_special_token_222|>",
1845
+ "lstrip": false,
1846
+ "normalized": false,
1847
+ "rstrip": false,
1848
+ "single_word": false,
1849
+ "special": true
1850
+ },
1851
+ "128231": {
1852
+ "content": "<|reserved_special_token_223|>",
1853
+ "lstrip": false,
1854
+ "normalized": false,
1855
+ "rstrip": false,
1856
+ "single_word": false,
1857
+ "special": true
1858
+ },
1859
+ "128232": {
1860
+ "content": "<|reserved_special_token_224|>",
1861
+ "lstrip": false,
1862
+ "normalized": false,
1863
+ "rstrip": false,
1864
+ "single_word": false,
1865
+ "special": true
1866
+ },
1867
+ "128233": {
1868
+ "content": "<|reserved_special_token_225|>",
1869
+ "lstrip": false,
1870
+ "normalized": false,
1871
+ "rstrip": false,
1872
+ "single_word": false,
1873
+ "special": true
1874
+ },
1875
+ "128234": {
1876
+ "content": "<|reserved_special_token_226|>",
1877
+ "lstrip": false,
1878
+ "normalized": false,
1879
+ "rstrip": false,
1880
+ "single_word": false,
1881
+ "special": true
1882
+ },
1883
+ "128235": {
1884
+ "content": "<|reserved_special_token_227|>",
1885
+ "lstrip": false,
1886
+ "normalized": false,
1887
+ "rstrip": false,
1888
+ "single_word": false,
1889
+ "special": true
1890
+ },
1891
+ "128236": {
1892
+ "content": "<|reserved_special_token_228|>",
1893
+ "lstrip": false,
1894
+ "normalized": false,
1895
+ "rstrip": false,
1896
+ "single_word": false,
1897
+ "special": true
1898
+ },
1899
+ "128237": {
1900
+ "content": "<|reserved_special_token_229|>",
1901
+ "lstrip": false,
1902
+ "normalized": false,
1903
+ "rstrip": false,
1904
+ "single_word": false,
1905
+ "special": true
1906
+ },
1907
+ "128238": {
1908
+ "content": "<|reserved_special_token_230|>",
1909
+ "lstrip": false,
1910
+ "normalized": false,
1911
+ "rstrip": false,
1912
+ "single_word": false,
1913
+ "special": true
1914
+ },
1915
+ "128239": {
1916
+ "content": "<|reserved_special_token_231|>",
1917
+ "lstrip": false,
1918
+ "normalized": false,
1919
+ "rstrip": false,
1920
+ "single_word": false,
1921
+ "special": true
1922
+ },
1923
+ "128240": {
1924
+ "content": "<|reserved_special_token_232|>",
1925
+ "lstrip": false,
1926
+ "normalized": false,
1927
+ "rstrip": false,
1928
+ "single_word": false,
1929
+ "special": true
1930
+ },
1931
+ "128241": {
1932
+ "content": "<|reserved_special_token_233|>",
1933
+ "lstrip": false,
1934
+ "normalized": false,
1935
+ "rstrip": false,
1936
+ "single_word": false,
1937
+ "special": true
1938
+ },
1939
+ "128242": {
1940
+ "content": "<|reserved_special_token_234|>",
1941
+ "lstrip": false,
1942
+ "normalized": false,
1943
+ "rstrip": false,
1944
+ "single_word": false,
1945
+ "special": true
1946
+ },
1947
+ "128243": {
1948
+ "content": "<|reserved_special_token_235|>",
1949
+ "lstrip": false,
1950
+ "normalized": false,
1951
+ "rstrip": false,
1952
+ "single_word": false,
1953
+ "special": true
1954
+ },
1955
+ "128244": {
1956
+ "content": "<|reserved_special_token_236|>",
1957
+ "lstrip": false,
1958
+ "normalized": false,
1959
+ "rstrip": false,
1960
+ "single_word": false,
1961
+ "special": true
1962
+ },
1963
+ "128245": {
1964
+ "content": "<|reserved_special_token_237|>",
1965
+ "lstrip": false,
1966
+ "normalized": false,
1967
+ "rstrip": false,
1968
+ "single_word": false,
1969
+ "special": true
1970
+ },
1971
+ "128246": {
1972
+ "content": "<|reserved_special_token_238|>",
1973
+ "lstrip": false,
1974
+ "normalized": false,
1975
+ "rstrip": false,
1976
+ "single_word": false,
1977
+ "special": true
1978
+ },
1979
+ "128247": {
1980
+ "content": "<|reserved_special_token_239|>",
1981
+ "lstrip": false,
1982
+ "normalized": false,
1983
+ "rstrip": false,
1984
+ "single_word": false,
1985
+ "special": true
1986
+ },
1987
+ "128248": {
1988
+ "content": "<|reserved_special_token_240|>",
1989
+ "lstrip": false,
1990
+ "normalized": false,
1991
+ "rstrip": false,
1992
+ "single_word": false,
1993
+ "special": true
1994
+ },
1995
+ "128249": {
1996
+ "content": "<|reserved_special_token_241|>",
1997
+ "lstrip": false,
1998
+ "normalized": false,
1999
+ "rstrip": false,
2000
+ "single_word": false,
2001
+ "special": true
2002
+ },
2003
+ "128250": {
2004
+ "content": "<|reserved_special_token_242|>",
2005
+ "lstrip": false,
2006
+ "normalized": false,
2007
+ "rstrip": false,
2008
+ "single_word": false,
2009
+ "special": true
2010
+ },
2011
+ "128251": {
2012
+ "content": "<|reserved_special_token_243|>",
2013
+ "lstrip": false,
2014
+ "normalized": false,
2015
+ "rstrip": false,
2016
+ "single_word": false,
2017
+ "special": true
2018
+ },
2019
+ "128252": {
2020
+ "content": "<|reserved_special_token_244|>",
2021
+ "lstrip": false,
2022
+ "normalized": false,
2023
+ "rstrip": false,
2024
+ "single_word": false,
2025
+ "special": true
2026
+ },
2027
+ "128253": {
2028
+ "content": "<|reserved_special_token_245|>",
2029
+ "lstrip": false,
2030
+ "normalized": false,
2031
+ "rstrip": false,
2032
+ "single_word": false,
2033
+ "special": true
2034
+ },
2035
+ "128254": {
2036
+ "content": "<|reserved_special_token_246|>",
2037
+ "lstrip": false,
2038
+ "normalized": false,
2039
+ "rstrip": false,
2040
+ "single_word": false,
2041
+ "special": true
2042
+ },
2043
+ "128255": {
2044
+ "content": "<|reserved_special_token_247|>",
2045
+ "lstrip": false,
2046
+ "normalized": false,
2047
+ "rstrip": false,
2048
+ "single_word": false,
2049
+ "special": true
2050
+ }
2051
+ },
2052
+ "additional_special_tokens": [
2053
+ "<|eot_id|>",
2054
+ "<|eom_id|>"
2055
+ ],
2056
+ "bos_token": "<|begin_of_text|>",
2057
+ "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- Extract system message #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content'] | trim %}\n {%- set messages = messages[1:] %}\n {{- system_message + \"\\n\" }}\n{%- else %}\n {%- set system_message = \"You are a helpful assistant that can use tools. You are developed by Salesforce xLAM team.\" %}\n {% set format_instruction %}You have access to a set of tools. When using tools, make calls in a single JSON array: \n\n[{\"name\": \"tool_call_name\", \"arguments\": {\"arg1\": \"value1\", \"arg2\": \"value2\"}}, ... (additional parallel tool calls as needed)]\n\nIf no tool is suitable, state that explicitly. If the user's input lacks required parameters, ask for clarification. Do not interpret or respond until tool results are returned. Once they are available, process them or make additional calls if needed. For tasks that don't require tools, such as casual conversation or general advice, respond directly in plain text. 
The available tools are:{% endset %}\n {{- system_message + \"\\n\" }}\n {%- if tools is not none %}\n {{- format_instruction + \"\\n\\n\" }}\n {%- endif %}\n{%- endif %}\n\n\n{%- if tools is not none %}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n{%- endif %}\n{{- \"<|eot_id|>\" }}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n {%- elif 'tool_calls' in message %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {%- if message['tool_calls'] %}\n {{- \"[\" }}\n {%- for tool_call_function in message.tool_calls %}\n {%- set tool_call = tool_call_function.function %}\n {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n {{- '\"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- \"}\" }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \"]\" }}\n {{- \"<|eot_id|>\" }}\n {%- elif message['content'] %}\n {{- message['content'] | trim + '<|eot_id|>' }}\n {%- else %}\n {{- \"[]\\n\" + '<|eot_id|>' }}\n {%- endif %}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|start_header_id|>\" + \"ipython\" + \"<|end_header_id|>\\n\\n\" }}\n {%- set content = message[\"content\"] %}\n {%- if content is mapping or (content is iterable and content is not string) %}\n {{- content | tojson }}\n {%- else %}\n {{- content }}\n {%- endif %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}",
2058
+ "clean_up_tokenization_spaces": true,
2059
+ "eos_token": "<|eot_id|>",
2060
+ "extra_special_tokens": {},
2061
+ "model_input_names": [
2062
+ "input_ids",
2063
+ "attention_mask"
2064
+ ],
2065
+ "model_max_length": 16384,
2066
+ "pad_token": "<|eot_id|>",
2067
+ "padding_side": "right",
2068
+ "split_special_tokens": false,
2069
+ "tokenizer_class": "PreTrainedTokenizerFast"
2070
+ }
checkpoint-1197/trainer_state.json ADDED
@@ -0,0 +1,922 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "best_metric": 0.042657386511564255,
3
+ "best_model_checkpoint": "./xlam_lora_new_ete_over_size_3epoch_multi_full_eng/checkpoint-1197",
4
+ "epoch": 2.3263893106589735,
5
+ "eval_steps": 171,
6
+ "global_step": 1197,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.019435165502581234,
13
+ "grad_norm": 0.2028260976076126,
14
+ "learning_rate": 3.205128205128205e-06,
15
+ "loss": 0.0883,
16
+ "step": 10
17
+ },
18
+ {
19
+ "epoch": 0.03887033100516247,
20
+ "grad_norm": 0.16199472546577454,
21
+ "learning_rate": 6.41025641025641e-06,
22
+ "loss": 0.0862,
23
+ "step": 20
24
+ },
25
+ {
26
+ "epoch": 0.0583054965077437,
27
+ "grad_norm": 0.10014229267835617,
28
+ "learning_rate": 9.615384615384616e-06,
29
+ "loss": 0.0785,
30
+ "step": 30
31
+ },
32
+ {
33
+ "epoch": 0.07774066201032494,
34
+ "grad_norm": 0.07943801581859589,
35
+ "learning_rate": 1.282051282051282e-05,
36
+ "loss": 0.0772,
37
+ "step": 40
38
+ },
39
+ {
40
+ "epoch": 0.09717582751290617,
41
+ "grad_norm": 0.11369317024946213,
42
+ "learning_rate": 1.602564102564103e-05,
43
+ "loss": 0.0649,
44
+ "step": 50
45
+ },
46
+ {
47
+ "epoch": 0.1166109930154874,
48
+ "grad_norm": 0.10264527052640915,
49
+ "learning_rate": 1.923076923076923e-05,
50
+ "loss": 0.0686,
51
+ "step": 60
52
+ },
53
+ {
54
+ "epoch": 0.13604615851806864,
55
+ "grad_norm": 0.08583803474903107,
56
+ "learning_rate": 2.2435897435897437e-05,
57
+ "loss": 0.0657,
58
+ "step": 70
59
+ },
60
+ {
61
+ "epoch": 0.15548132402064987,
62
+ "grad_norm": 0.09326862543821335,
63
+ "learning_rate": 2.4999884878368972e-05,
64
+ "loss": 0.0605,
65
+ "step": 80
66
+ },
67
+ {
68
+ "epoch": 0.1749164895232311,
69
+ "grad_norm": 0.09622487425804138,
70
+ "learning_rate": 2.4995855843928913e-05,
71
+ "loss": 0.0617,
72
+ "step": 90
73
+ },
74
+ {
75
+ "epoch": 0.19435165502581234,
76
+ "grad_norm": 0.10855123400688171,
77
+ "learning_rate": 2.4986072848240374e-05,
78
+ "loss": 0.0548,
79
+ "step": 100
80
+ },
81
+ {
82
+ "epoch": 0.21378682052839357,
83
+ "grad_norm": 0.12510105967521667,
84
+ "learning_rate": 2.4970540396075083e-05,
85
+ "loss": 0.0545,
86
+ "step": 110
87
+ },
88
+ {
89
+ "epoch": 0.2332219860309748,
90
+ "grad_norm": 0.11628145724534988,
91
+ "learning_rate": 2.494926563965445e-05,
92
+ "loss": 0.0601,
93
+ "step": 120
94
+ },
95
+ {
96
+ "epoch": 0.252657151533556,
97
+ "grad_norm": 0.12050613015890121,
98
+ "learning_rate": 2.4922258375356232e-05,
99
+ "loss": 0.0585,
100
+ "step": 130
101
+ },
102
+ {
103
+ "epoch": 0.2720923170361373,
104
+ "grad_norm": 0.08949285745620728,
105
+ "learning_rate": 2.488953103920354e-05,
106
+ "loss": 0.0477,
107
+ "step": 140
108
+ },
109
+ {
110
+ "epoch": 0.2915274825387185,
111
+ "grad_norm": 0.14706853032112122,
112
+ "learning_rate": 2.4851098701138466e-05,
113
+ "loss": 0.0555,
114
+ "step": 150
115
+ },
116
+ {
117
+ "epoch": 0.31096264804129975,
118
+ "grad_norm": 0.13029147684574127,
119
+ "learning_rate": 2.4806979058082832e-05,
120
+ "loss": 0.0493,
121
+ "step": 160
122
+ },
123
+ {
124
+ "epoch": 0.330397813543881,
125
+ "grad_norm": 0.1321103274822235,
126
+ "learning_rate": 2.475719242578929e-05,
127
+ "loss": 0.0547,
128
+ "step": 170
129
+ },
130
+ {
131
+ "epoch": 0.3323413300941391,
132
+ "eval_loss": 0.054034121334552765,
133
+ "eval_runtime": 172.2024,
134
+ "eval_samples_per_second": 5.029,
135
+ "eval_steps_per_second": 5.029,
136
+ "step": 171
137
+ },
138
+ {
139
+ "epoch": 0.3498329790464622,
140
+ "grad_norm": 0.14103983342647552,
141
+ "learning_rate": 2.470176172948652e-05,
142
+ "loss": 0.0522,
143
+ "step": 180
144
+ },
145
+ {
146
+ "epoch": 0.36926814454904344,
147
+ "grad_norm": 0.14120739698410034,
148
+ "learning_rate": 2.4640712493322894e-05,
149
+ "loss": 0.0501,
150
+ "step": 190
151
+ },
152
+ {
153
+ "epoch": 0.3887033100516247,
154
+ "grad_norm": 0.1301490068435669,
155
+ "learning_rate": 2.4574072828613354e-05,
156
+ "loss": 0.0492,
157
+ "step": 200
158
+ },
159
+ {
160
+ "epoch": 0.4081384755542059,
161
+ "grad_norm": 0.1606156975030899,
162
+ "learning_rate": 2.450187342089502e-05,
163
+ "loss": 0.0488,
164
+ "step": 210
165
+ },
166
+ {
167
+ "epoch": 0.42757364105678713,
168
+ "grad_norm": 0.1623048037290573,
169
+ "learning_rate": 2.442414751579744e-05,
170
+ "loss": 0.0459,
171
+ "step": 220
172
+ },
173
+ {
174
+ "epoch": 0.44700880655936837,
175
+ "grad_norm": 0.1424696147441864,
176
+ "learning_rate": 2.434093090373396e-05,
177
+ "loss": 0.0441,
178
+ "step": 230
179
+ },
180
+ {
181
+ "epoch": 0.4664439720619496,
182
+ "grad_norm": 0.13980570435523987,
183
+ "learning_rate": 2.4252261903421375e-05,
184
+ "loss": 0.0483,
185
+ "step": 240
186
+ },
187
+ {
188
+ "epoch": 0.4858791375645308,
189
+ "grad_norm": 0.17693087458610535,
190
+ "learning_rate": 2.415818134423528e-05,
191
+ "loss": 0.0451,
192
+ "step": 250
193
+ },
194
+ {
195
+ "epoch": 0.505314303067112,
196
+ "grad_norm": 0.18210634589195251,
197
+ "learning_rate": 2.405873254740942e-05,
198
+ "loss": 0.0466,
199
+ "step": 260
200
+ },
201
+ {
202
+ "epoch": 0.5247494685696933,
203
+ "grad_norm": 0.1527709811925888,
204
+ "learning_rate": 2.395396130608756e-05,
205
+ "loss": 0.0488,
206
+ "step": 270
207
+ },
208
+ {
209
+ "epoch": 0.5441846340722746,
210
+ "grad_norm": 0.15795762836933136,
211
+ "learning_rate": 2.3843915864237143e-05,
212
+ "loss": 0.0448,
213
+ "step": 280
214
+ },
215
+ {
216
+ "epoch": 0.5636197995748558,
217
+ "grad_norm": 0.15495383739471436,
218
+ "learning_rate": 2.3728646894434413e-05,
219
+ "loss": 0.047,
220
+ "step": 290
221
+ },
222
+ {
223
+ "epoch": 0.583054965077437,
224
+ "grad_norm": 0.17630279064178467,
225
+ "learning_rate": 2.3608207474531236e-05,
226
+ "loss": 0.0429,
227
+ "step": 300
228
+ },
229
+ {
230
+ "epoch": 0.6024901305800182,
231
+ "grad_norm": 0.17523537576198578,
232
+ "learning_rate": 2.3482653063214356e-05,
233
+ "loss": 0.0473,
234
+ "step": 310
235
+ },
236
+ {
237
+ "epoch": 0.6219252960825995,
238
+ "grad_norm": 0.1995943933725357,
239
+ "learning_rate": 2.3352041474468373e-05,
240
+ "loss": 0.0392,
241
+ "step": 320
242
+ },
243
+ {
244
+ "epoch": 0.6413604615851807,
245
+ "grad_norm": 0.1683543622493744,
246
+ "learning_rate": 2.3216432850954155e-05,
247
+ "loss": 0.0438,
248
+ "step": 330
249
+ },
250
+ {
251
+ "epoch": 0.660795627087762,
252
+ "grad_norm": 0.1374560445547104,
253
+ "learning_rate": 2.307588963631497e-05,
254
+ "loss": 0.0388,
255
+ "step": 340
256
+ },
257
+ {
258
+ "epoch": 0.6646826601882782,
259
+ "eval_loss": 0.048522137105464935,
260
+ "eval_runtime": 174.8794,
261
+ "eval_samples_per_second": 4.952,
262
+ "eval_steps_per_second": 4.952,
263
+ "step": 342
264
+ },
265
+ {
266
+ "epoch": 0.6802307925903431,
267
+ "grad_norm": 0.166055366396904,
268
+ "learning_rate": 2.293047654642309e-05,
269
+ "loss": 0.0469,
270
+ "step": 350
271
+ },
272
+ {
273
+ "epoch": 0.6996659580929244,
274
+ "grad_norm": 0.14119000732898712,
275
+ "learning_rate": 2.2780260539580116e-05,
276
+ "loss": 0.0424,
277
+ "step": 360
278
+ },
279
+ {
280
+ "epoch": 0.7191011235955056,
281
+ "grad_norm": 0.23057888448238373,
282
+ "learning_rate": 2.2625310785684688e-05,
283
+ "loss": 0.0451,
284
+ "step": 370
285
+ },
286
+ {
287
+ "epoch": 0.7385362890980869,
288
+ "grad_norm": 0.1304253786802292,
289
+ "learning_rate": 2.2465698634381892e-05,
290
+ "loss": 0.0434,
291
+ "step": 380
292
+ },
293
+ {
294
+ "epoch": 0.7579714546006681,
295
+ "grad_norm": 0.17952455580234528,
296
+ "learning_rate": 2.2301497582208884e-05,
297
+ "loss": 0.0409,
298
+ "step": 390
299
+ },
300
+ {
301
+ "epoch": 0.7774066201032493,
302
+ "grad_norm": 0.15560920536518097,
303
+ "learning_rate": 2.2132783238751987e-05,
304
+ "loss": 0.0465,
305
+ "step": 400
306
+ },
307
+ {
308
+ "epoch": 0.7968417856058305,
309
+ "grad_norm": 0.12066332250833511,
310
+ "learning_rate": 2.1959633291830785e-05,
311
+ "loss": 0.0433,
312
+ "step": 410
313
+ },
314
+ {
315
+ "epoch": 0.8162769511084118,
316
+ "grad_norm": 0.15932129323482513,
317
+ "learning_rate": 2.1782127471725232e-05,
318
+ "loss": 0.0458,
319
+ "step": 420
320
+ },
321
+ {
322
+ "epoch": 0.835712116610993,
323
+ "grad_norm": 0.19025273621082306,
324
+ "learning_rate": 2.160034751446231e-05,
325
+ "loss": 0.0522,
326
+ "step": 430
327
+ },
328
+ {
329
+ "epoch": 0.8551472821135743,
330
+ "grad_norm": 0.15156866610050201,
331
+ "learning_rate": 2.1414377124179038e-05,
332
+ "loss": 0.0433,
333
+ "step": 440
334
+ },
335
+ {
336
+ "epoch": 0.8745824476161554,
337
+ "grad_norm": 0.1594618409872055,
338
+ "learning_rate": 2.1224301934579293e-05,
339
+ "loss": 0.0434,
340
+ "step": 450
341
+ },
342
+ {
343
+ "epoch": 0.8940176131187367,
344
+ "grad_norm": 0.16077858209609985,
345
+ "learning_rate": 2.1030209469502098e-05,
346
+ "loss": 0.0426,
347
+ "step": 460
348
+ },
349
+ {
350
+ "epoch": 0.9134527786213179,
351
+ "grad_norm": 0.15807421505451202,
352
+ "learning_rate": 2.0832189102619543e-05,
353
+ "loss": 0.0461,
354
+ "step": 470
355
+ },
356
+ {
357
+ "epoch": 0.9328879441238992,
358
+ "grad_norm": 0.20585452020168304,
359
+ "learning_rate": 2.0630332016282927e-05,
360
+ "loss": 0.0429,
361
+ "step": 480
362
+ },
363
+ {
364
+ "epoch": 0.9523231096264804,
365
+ "grad_norm": 0.1859339326620102,
366
+ "learning_rate": 2.0424731159536083e-05,
367
+ "loss": 0.0432,
368
+ "step": 490
369
+ },
370
+ {
371
+ "epoch": 0.9717582751290617,
372
+ "grad_norm": 0.20674780011177063,
373
+ "learning_rate": 2.021548120531516e-05,
374
+ "loss": 0.0392,
375
+ "step": 500
376
+ },
377
+ {
378
+ "epoch": 0.9911934406316428,
379
+ "grad_norm": 0.22314420342445374,
380
+ "learning_rate": 2.0002678506854606e-05,
381
+ "loss": 0.041,
382
+ "step": 510
383
+ },
384
+ {
385
+ "epoch": 0.9970239902824173,
386
+ "eval_loss": 0.04610577970743179,
387
+ "eval_runtime": 175.1913,
388
+ "eval_samples_per_second": 4.943,
389
+ "eval_steps_per_second": 4.943,
390
+ "step": 513
391
+ },
392
+ {
393
+ "epoch": 1.010628606134224,
394
+ "grad_norm": 0.1707136183977127,
395
+ "learning_rate": 1.9786421053319455e-05,
396
+ "loss": 0.0468,
397
+ "step": 520
398
+ },
399
+ {
400
+ "epoch": 1.0300637716368053,
401
+ "grad_norm": 0.18121393024921417,
402
+ "learning_rate": 1.9566808424684284e-05,
403
+ "loss": 0.0361,
404
+ "step": 530
405
+ },
406
+ {
407
+ "epoch": 1.0494989371393866,
408
+ "grad_norm": 0.17815589904785156,
409
+ "learning_rate": 1.9343941745879695e-05,
410
+ "loss": 0.0418,
411
+ "step": 540
412
+ },
413
+ {
414
+ "epoch": 1.0689341026419679,
415
+ "grad_norm": 0.17634443938732147,
416
+ "learning_rate": 1.9117923640227314e-05,
417
+ "loss": 0.0437,
418
+ "step": 550
419
+ },
420
+ {
421
+ "epoch": 1.088369268144549,
422
+ "grad_norm": 0.18102432787418365,
423
+ "learning_rate": 1.8888858182184925e-05,
424
+ "loss": 0.0426,
425
+ "step": 560
426
+ },
427
+ {
428
+ "epoch": 1.1078044336471302,
429
+ "grad_norm": 0.20596696436405182,
430
+ "learning_rate": 1.865685084942331e-05,
431
+ "loss": 0.0414,
432
+ "step": 570
433
+ },
434
+ {
435
+ "epoch": 1.1272395991497115,
436
+ "grad_norm": 0.2145373523235321,
437
+ "learning_rate": 1.8422008474257013e-05,
438
+ "loss": 0.0354,
439
+ "step": 580
440
+ },
441
+ {
442
+ "epoch": 1.1466747646522928,
443
+ "grad_norm": 0.2385258674621582,
444
+ "learning_rate": 1.8184439194451326e-05,
445
+ "loss": 0.0357,
446
+ "step": 590
447
+ },
448
+ {
449
+ "epoch": 1.166109930154874,
450
+ "grad_norm": 0.23679479956626892,
451
+ "learning_rate": 1.7944252403428097e-05,
452
+ "loss": 0.041,
453
+ "step": 600
454
+ },
455
+ {
456
+ "epoch": 1.1855450956574551,
457
+ "grad_norm": 0.1947854906320572,
458
+ "learning_rate": 1.770155869989343e-05,
459
+ "loss": 0.0407,
460
+ "step": 610
461
+ },
462
+ {
463
+ "epoch": 1.2049802611600364,
464
+ "grad_norm": 0.25392264127731323,
465
+ "learning_rate": 1.7456469836910334e-05,
466
+ "loss": 0.0405,
467
+ "step": 620
468
+ },
469
+ {
470
+ "epoch": 1.2244154266626177,
471
+ "grad_norm": 0.2636931240558624,
472
+ "learning_rate": 1.7209098670439817e-05,
473
+ "loss": 0.0442,
474
+ "step": 630
475
+ },
476
+ {
477
+ "epoch": 1.243850592165199,
478
+ "grad_norm": 0.19380666315555573,
479
+ "learning_rate": 1.695955910737419e-05,
480
+ "loss": 0.0344,
481
+ "step": 640
482
+ },
483
+ {
484
+ "epoch": 1.26328575766778,
485
+ "grad_norm": 0.2423335164785385,
486
+ "learning_rate": 1.670796605308638e-05,
487
+ "loss": 0.0415,
488
+ "step": 650
489
+ },
490
+ {
491
+ "epoch": 1.2827209231703613,
492
+ "grad_norm": 0.20322546362876892,
493
+ "learning_rate": 1.6454435358519524e-05,
494
+ "loss": 0.0381,
495
+ "step": 660
496
+ },
497
+ {
498
+ "epoch": 1.3021560886729426,
499
+ "grad_norm": 0.20570382475852966,
500
+ "learning_rate": 1.6199083766841115e-05,
501
+ "loss": 0.035,
502
+ "step": 670
503
+ },
504
+ {
505
+ "epoch": 1.321591254175524,
506
+ "grad_norm": 0.23237545788288116,
507
+ "learning_rate": 1.594202885968636e-05,
508
+ "loss": 0.0447,
509
+ "step": 680
510
+ },
511
+ {
512
+ "epoch": 1.3293653203765563,
513
+ "eval_loss": 0.0446239709854126,
514
+ "eval_runtime": 174.4977,
515
+ "eval_samples_per_second": 4.963,
516
+ "eval_steps_per_second": 4.963,
517
+ "step": 684
518
+ },
519
+ {
520
+ "epoch": 1.341026419678105,
521
+ "grad_norm": 0.2273474782705307,
522
+ "learning_rate": 1.568338900301536e-05,
523
+ "loss": 0.0406,
524
+ "step": 690
525
+ },
526
+ {
527
+ "epoch": 1.3604615851806863,
528
+ "grad_norm": 0.2105632871389389,
529
+ "learning_rate": 1.5423283292609255e-05,
530
+ "loss": 0.0384,
531
+ "step": 700
532
+ },
533
+ {
534
+ "epoch": 1.3798967506832676,
535
+ "grad_norm": 0.728905200958252,
536
+ "learning_rate": 1.5161831499230197e-05,
537
+ "loss": 0.0385,
538
+ "step": 710
539
+ },
540
+ {
541
+ "epoch": 1.3993319161858488,
542
+ "grad_norm": 0.2697829604148865,
543
+ "learning_rate": 1.4899154013470574e-05,
544
+ "loss": 0.0407,
545
+ "step": 720
546
+ },
547
+ {
548
+ "epoch": 1.4187670816884301,
549
+ "grad_norm": 0.23418137431144714,
550
+ "learning_rate": 1.4635371790316805e-05,
551
+ "loss": 0.0425,
552
+ "step": 730
553
+ },
554
+ {
555
+ "epoch": 1.4382022471910112,
556
+ "grad_norm": 0.21163399517536163,
557
+ "learning_rate": 1.437060629345325e-05,
558
+ "loss": 0.0369,
559
+ "step": 740
560
+ },
561
+ {
562
+ "epoch": 1.4576374126935925,
563
+ "grad_norm": 0.24824394285678864,
564
+ "learning_rate": 1.4104979439331889e-05,
565
+ "loss": 0.0413,
566
+ "step": 750
567
+ },
568
+ {
569
+ "epoch": 1.4770725781961738,
570
+ "grad_norm": 0.24107202887535095,
571
+ "learning_rate": 1.383861354103349e-05,
572
+ "loss": 0.0368,
573
+ "step": 760
574
+ },
575
+ {
576
+ "epoch": 1.4965077436987548,
577
+ "grad_norm": 0.25261572003364563,
578
+ "learning_rate": 1.357163125194618e-05,
579
+ "loss": 0.0437,
580
+ "step": 770
581
+ },
582
+ {
583
+ "epoch": 1.5159429092013361,
584
+ "grad_norm": 0.1909024715423584,
585
+ "learning_rate": 1.3304155509287273e-05,
586
+ "loss": 0.0376,
587
+ "step": 780
588
+ },
589
+ {
590
+ "epoch": 1.5353780747039174,
591
+ "grad_norm": 0.24820125102996826,
592
+ "learning_rate": 1.3036309477494433e-05,
593
+ "loss": 0.038,
594
+ "step": 790
595
+ },
596
+ {
597
+ "epoch": 1.5548132402064985,
598
+ "grad_norm": 0.2151060849428177,
599
+ "learning_rate": 1.27682164915122e-05,
600
+ "loss": 0.0427,
601
+ "step": 800
602
+ },
603
+ {
604
+ "epoch": 1.57424840570908,
605
+ "grad_norm": 0.2735498249530792,
606
+ "learning_rate": 1.25e-05,
607
+ "loss": 0.0405,
608
+ "step": 810
609
+ },
610
+ {
611
+ "epoch": 1.593683571211661,
612
+ "grad_norm": 0.20167022943496704,
613
+ "learning_rate": 1.2231783508487806e-05,
614
+ "loss": 0.0361,
615
+ "step": 820
616
+ },
617
+ {
618
+ "epoch": 1.6131187367142423,
619
+ "grad_norm": 0.29892289638519287,
620
+ "learning_rate": 1.1963690522505568e-05,
621
+ "loss": 0.0343,
622
+ "step": 830
623
+ },
624
+ {
625
+ "epoch": 1.6325539022168236,
626
+ "grad_norm": 0.2181161344051361,
627
+ "learning_rate": 1.169584449071273e-05,
628
+ "loss": 0.044,
629
+ "step": 840
630
+ },
631
+ {
632
+ "epoch": 1.6519890677194047,
633
+ "grad_norm": 0.24029949307441711,
634
+ "learning_rate": 1.142836874805382e-05,
635
+ "loss": 0.0347,
636
+ "step": 850
637
+ },
638
+ {
639
+ "epoch": 1.6617066504706954,
640
+ "eval_loss": 0.04329855740070343,
641
+ "eval_runtime": 170.3718,
642
+ "eval_samples_per_second": 5.083,
643
+ "eval_steps_per_second": 5.083,
644
+ "step": 855
645
+ },
646
+ {
647
+ "epoch": 1.6714242332219862,
648
+ "grad_norm": 0.23985505104064941,
649
+ "learning_rate": 1.1161386458966511e-05,
650
+ "loss": 0.0401,
651
+ "step": 860
652
+ },
653
+ {
654
+ "epoch": 1.6908593987245673,
655
+ "grad_norm": 0.22673290967941284,
656
+ "learning_rate": 1.0895020560668112e-05,
657
+ "loss": 0.0364,
658
+ "step": 870
659
+ },
660
+ {
661
+ "epoch": 1.7102945642271485,
662
+ "grad_norm": 0.22750601172447205,
663
+ "learning_rate": 1.0629393706546752e-05,
664
+ "loss": 0.039,
665
+ "step": 880
666
+ },
667
+ {
668
+ "epoch": 1.7297297297297298,
669
+ "grad_norm": 0.24846433103084564,
670
+ "learning_rate": 1.03646282096832e-05,
671
+ "loss": 0.0341,
672
+ "step": 890
673
+ },
674
+ {
675
+ "epoch": 1.749164895232311,
676
+ "grad_norm": 0.19867610931396484,
677
+ "learning_rate": 1.0100845986529429e-05,
678
+ "loss": 0.036,
679
+ "step": 900
680
+ },
681
+ {
682
+ "epoch": 1.7686000607348922,
683
+ "grad_norm": 0.2108948975801468,
684
+ "learning_rate": 9.838168500769806e-06,
685
+ "loss": 0.0359,
686
+ "step": 910
687
+ },
688
+ {
689
+ "epoch": 1.7880352262374735,
690
+ "grad_norm": 0.22352129220962524,
691
+ "learning_rate": 9.576716707390745e-06,
692
+ "loss": 0.0432,
693
+ "step": 920
694
+ },
695
+ {
696
+ "epoch": 1.8074703917400545,
697
+ "grad_norm": 0.2634281516075134,
698
+ "learning_rate": 9.316610996984642e-06,
699
+ "loss": 0.0371,
700
+ "step": 930
701
+ },
702
+ {
703
+ "epoch": 1.826905557242636,
704
+ "grad_norm": 0.261662095785141,
705
+ "learning_rate": 9.057971140313645e-06,
706
+ "loss": 0.0414,
707
+ "step": 940
708
+ },
709
+ {
710
+ "epoch": 1.846340722745217,
711
+ "grad_norm": 0.269505113363266,
712
+ "learning_rate": 8.800916233158885e-06,
713
+ "loss": 0.033,
714
+ "step": 950
715
+ },
716
+ {
717
+ "epoch": 1.8657758882477984,
718
+ "grad_norm": 0.2583780288696289,
719
+ "learning_rate": 8.545564641480484e-06,
720
+ "loss": 0.04,
721
+ "step": 960
722
+ },
723
+ {
724
+ "epoch": 1.8852110537503797,
725
+ "grad_norm": 0.2607627213001251,
726
+ "learning_rate": 8.292033946913624e-06,
727
+ "loss": 0.0354,
728
+ "step": 970
729
+ },
730
+ {
731
+ "epoch": 1.9046462192529607,
732
+ "grad_norm": 0.23419621586799622,
733
+ "learning_rate": 8.040440892625817e-06,
734
+ "loss": 0.0362,
735
+ "step": 980
736
+ },
737
+ {
738
+ "epoch": 1.924081384755542,
739
+ "grad_norm": 0.2785259783267975,
740
+ "learning_rate": 7.790901329560186e-06,
741
+ "loss": 0.0367,
742
+ "step": 990
743
+ },
744
+ {
745
+ "epoch": 1.9435165502581233,
746
+ "grad_norm": 0.2678421437740326,
747
+ "learning_rate": 7.543530163089671e-06,
748
+ "loss": 0.0363,
749
+ "step": 1000
750
+ },
751
+ {
752
+ "epoch": 1.9629517157607044,
753
+ "grad_norm": 0.2274320125579834,
754
+ "learning_rate": 7.29844130010657e-06,
755
+ "loss": 0.0395,
756
+ "step": 1010
757
+ },
758
+ {
759
+ "epoch": 1.9823868812632859,
760
+ "grad_norm": 0.2792135179042816,
761
+ "learning_rate": 7.055747596571904e-06,
762
+ "loss": 0.0374,
763
+ "step": 1020
764
+ },
765
+ {
766
+ "epoch": 1.9940479805648343,
767
+ "eval_loss": 0.04268278926610947,
768
+ "eval_runtime": 169.855,
769
+ "eval_samples_per_second": 5.098,
770
+ "eval_steps_per_second": 5.098,
771
+ "step": 1026
772
+ },
773
+ {
774
+ "epoch": 2.001822046765867,
775
+ "grad_norm": 0.2434389591217041,
776
+ "learning_rate": 6.815560805548682e-06,
777
+ "loss": 0.0359,
778
+ "step": 1030
779
+ },
780
+ {
781
+ "epoch": 2.021257212268448,
782
+ "grad_norm": 0.2474217265844345,
783
+ "learning_rate": 6.577991525742991e-06,
784
+ "loss": 0.0301,
785
+ "step": 1040
786
+ },
787
+ {
788
+ "epoch": 2.0406923777710295,
789
+ "grad_norm": 0.21972987055778503,
790
+ "learning_rate": 6.3431491505766965e-06,
791
+ "loss": 0.031,
792
+ "step": 1050
793
+ },
794
+ {
795
+ "epoch": 2.0601275432736106,
796
+ "grad_norm": 0.26206907629966736,
797
+ "learning_rate": 6.111141817815079e-06,
798
+ "loss": 0.0367,
799
+ "step": 1060
800
+ },
801
+ {
802
+ "epoch": 2.079562708776192,
803
+ "grad_norm": 0.26828330755233765,
804
+ "learning_rate": 5.882076359772686e-06,
805
+ "loss": 0.0358,
806
+ "step": 1070
807
+ },
808
+ {
809
+ "epoch": 2.098997874278773,
810
+ "grad_norm": 0.3341256380081177,
811
+ "learning_rate": 5.656058254120305e-06,
812
+ "loss": 0.0362,
813
+ "step": 1080
814
+ },
815
+ {
816
+ "epoch": 2.118433039781354,
817
+ "grad_norm": 0.24040237069129944,
818
+ "learning_rate": 5.4331915753157175e-06,
819
+ "loss": 0.0336,
820
+ "step": 1090
821
+ },
822
+ {
823
+ "epoch": 2.1378682052839357,
824
+ "grad_norm": 0.2951533794403076,
825
+ "learning_rate": 5.213578946680547e-06,
826
+ "loss": 0.0374,
827
+ "step": 1100
828
+ },
829
+ {
830
+ "epoch": 2.157303370786517,
831
+ "grad_norm": 0.3339778184890747,
832
+ "learning_rate": 4.997321493145399e-06,
833
+ "loss": 0.0336,
834
+ "step": 1110
835
+ },
836
+ {
837
+ "epoch": 2.176738536289098,
838
+ "grad_norm": 0.28566041588783264,
839
+ "learning_rate": 4.784518794684843e-06,
840
+ "loss": 0.0305,
841
+ "step": 1120
842
+ },
843
+ {
844
+ "epoch": 2.1961737017916794,
845
+ "grad_norm": 0.23707829415798187,
846
+ "learning_rate": 4.575268840463919e-06,
847
+ "loss": 0.0347,
848
+ "step": 1130
849
+ },
850
+ {
851
+ "epoch": 2.2156088672942604,
852
+ "grad_norm": 0.268509179353714,
853
+ "learning_rate": 4.3696679837170755e-06,
854
+ "loss": 0.0338,
855
+ "step": 1140
856
+ },
857
+ {
858
+ "epoch": 2.235044032796842,
859
+ "grad_norm": 0.2489359974861145,
860
+ "learning_rate": 4.16781089738046e-06,
861
+ "loss": 0.0346,
862
+ "step": 1150
863
+ },
864
+ {
865
+ "epoch": 2.254479198299423,
866
+ "grad_norm": 0.2973306179046631,
867
+ "learning_rate": 3.969790530497904e-06,
868
+ "loss": 0.0341,
869
+ "step": 1160
870
+ },
871
+ {
872
+ "epoch": 2.273914363802004,
873
+ "grad_norm": 0.2385181337594986,
874
+ "learning_rate": 3.7756980654207032e-06,
875
+ "loss": 0.0307,
876
+ "step": 1170
877
+ },
878
+ {
879
+ "epoch": 2.2933495293045856,
880
+ "grad_norm": 0.24831977486610413,
881
+ "learning_rate": 3.585622875820965e-06,
882
+ "loss": 0.0324,
883
+ "step": 1180
884
+ },
885
+ {
886
+ "epoch": 2.3127846948071666,
887
+ "grad_norm": 0.2952122092247009,
888
+ "learning_rate": 3.399652485537691e-06,
889
+ "loss": 0.0337,
890
+ "step": 1190
891
+ },
892
+ {
893
+ "epoch": 2.3263893106589735,
894
+ "eval_loss": 0.042657386511564255,
895
+ "eval_runtime": 168.8317,
896
+ "eval_samples_per_second": 5.129,
897
+ "eval_steps_per_second": 5.129,
898
+ "step": 1197
899
+ }
900
+ ],
901
+ "logging_steps": 10,
902
+ "max_steps": 1542,
903
+ "num_input_tokens_seen": 0,
904
+ "num_train_epochs": 3,
905
+ "save_steps": 171,
906
+ "stateful_callbacks": {
907
+ "TrainerControl": {
908
+ "args": {
909
+ "should_epoch_stop": false,
910
+ "should_evaluate": false,
911
+ "should_log": false,
912
+ "should_save": true,
913
+ "should_training_stop": false
914
+ },
915
+ "attributes": {}
916
+ }
917
+ },
918
+ "total_flos": 9.178276830835507e+17,
919
+ "train_batch_size": 1,
920
+ "trial_name": null,
921
+ "trial_params": null
922
+ }
checkpoint-1197/training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d1d135889b53a4fc2dcc2af42081d4ec6fb88ac6da63713e48b39ec06726249a
3
+ size 5624
checkpoint-1368/README.md ADDED
@@ -0,0 +1,209 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ base_model: Salesforce/Llama-xLAM-2-8b-fc-r
3
+ library_name: peft
4
+ pipeline_tag: text-generation
5
+ tags:
6
+ - base_model:adapter:Salesforce/Llama-xLAM-2-8b-fc-r
7
+ - lora
8
+ - sft
9
+ - transformers
10
+ - trl
11
+ ---
12
+
13
+ # Model Card for Model ID
14
+
15
+ <!-- Provide a quick summary of what the model is/does. -->
16
+
17
+
18
+
19
+ ## Model Details
20
+
21
+ ### Model Description
22
+
23
+ <!-- Provide a longer summary of what this model is. -->
24
+
25
+
26
+
27
+ - **Developed by:** [More Information Needed]
28
+ - **Funded by [optional]:** [More Information Needed]
29
+ - **Shared by [optional]:** [More Information Needed]
30
+ - **Model type:** [More Information Needed]
31
+ - **Language(s) (NLP):** [More Information Needed]
32
+ - **License:** [More Information Needed]
33
+ - **Finetuned from model [optional]:** [More Information Needed]
34
+
35
+ ### Model Sources [optional]
36
+
37
+ <!-- Provide the basic links for the model. -->
38
+
39
+ - **Repository:** [More Information Needed]
40
+ - **Paper [optional]:** [More Information Needed]
41
+ - **Demo [optional]:** [More Information Needed]
42
+
43
+ ## Uses
44
+
45
+ <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->
46
+
47
+ ### Direct Use
48
+
49
+ <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->
50
+
51
+ [More Information Needed]
52
+
53
+ ### Downstream Use [optional]
54
+
55
+ <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->
56
+
57
+ [More Information Needed]
58
+
59
+ ### Out-of-Scope Use
60
+
61
+ <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->
62
+
63
+ [More Information Needed]
64
+
65
+ ## Bias, Risks, and Limitations
66
+
67
+ <!-- This section is meant to convey both technical and sociotechnical limitations. -->
68
+
69
+ [More Information Needed]
70
+
71
+ ### Recommendations
72
+
73
+ <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
74
+
75
+ Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
76
+
77
+ ## How to Get Started with the Model
78
+
79
+ Use the code below to get started with the model.
80
+
81
+ [More Information Needed]
82
+
83
+ ## Training Details
84
+
85
+ ### Training Data
86
+
87
+ <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->
88
+
89
+ [More Information Needed]
90
+
91
+ ### Training Procedure
92
+
93
+ <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->
94
+
95
+ #### Preprocessing [optional]
96
+
97
+ [More Information Needed]
98
+
99
+
100
+ #### Training Hyperparameters
101
+
102
+ - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->
103
+
104
+ #### Speeds, Sizes, Times [optional]
105
+
106
+ <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->
107
+
108
+ [More Information Needed]
109
+
110
+ ## Evaluation
111
+
112
+ <!-- This section describes the evaluation protocols and provides the results. -->
113
+
114
+ ### Testing Data, Factors & Metrics
115
+
116
+ #### Testing Data
117
+
118
+ <!-- This should link to a Dataset Card if possible. -->
119
+
120
+ [More Information Needed]
121
+
122
+ #### Factors
123
+
124
+ <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->
125
+
126
+ [More Information Needed]
127
+
128
+ #### Metrics
129
+
130
+ <!-- These are the evaluation metrics being used, ideally with a description of why. -->
131
+
132
+ [More Information Needed]
133
+
134
+ ### Results
135
+
136
+ [More Information Needed]
137
+
138
+ #### Summary
139
+
140
+
141
+
142
+ ## Model Examination [optional]
143
+
144
+ <!-- Relevant interpretability work for the model goes here -->
145
+
146
+ [More Information Needed]
147
+
148
+ ## Environmental Impact
149
+
150
+ <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->
151
+
152
+ Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
153
+
154
+ - **Hardware Type:** [More Information Needed]
155
+ - **Hours used:** [More Information Needed]
156
+ - **Cloud Provider:** [More Information Needed]
157
+ - **Compute Region:** [More Information Needed]
158
+ - **Carbon Emitted:** [More Information Needed]
159
+
160
+ ## Technical Specifications [optional]
161
+
162
+ ### Model Architecture and Objective
163
+
164
+ [More Information Needed]
165
+
166
+ ### Compute Infrastructure
167
+
168
+ [More Information Needed]
169
+
170
+ #### Hardware
171
+
172
+ [More Information Needed]
173
+
174
+ #### Software
175
+
176
+ [More Information Needed]
177
+
178
+ ## Citation [optional]
179
+
180
+ <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->
181
+
182
+ **BibTeX:**
183
+
184
+ [More Information Needed]
185
+
186
+ **APA:**
187
+
188
+ [More Information Needed]
189
+
190
+ ## Glossary [optional]
191
+
192
+ <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->
193
+
194
+ [More Information Needed]
195
+
196
+ ## More Information [optional]
197
+
198
+ [More Information Needed]
199
+
200
+ ## Model Card Authors [optional]
201
+
202
+ [More Information Needed]
203
+
204
+ ## Model Card Contact
205
+
206
+ [More Information Needed]
207
+ ### Framework versions
208
+
209
+ - PEFT 0.17.1
checkpoint-1368/adapter_config.json ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "alpha_pattern": {},
3
+ "auto_mapping": null,
4
+ "base_model_name_or_path": "Salesforce/Llama-xLAM-2-8b-fc-r",
5
+ "bias": "none",
6
+ "corda_config": null,
7
+ "eva_config": null,
8
+ "exclude_modules": null,
9
+ "fan_in_fan_out": false,
10
+ "inference_mode": true,
11
+ "init_lora_weights": true,
12
+ "layer_replication": null,
13
+ "layers_pattern": null,
14
+ "layers_to_transform": null,
15
+ "loftq_config": {},
16
+ "lora_alpha": 32,
17
+ "lora_bias": false,
18
+ "lora_dropout": 0.05,
19
+ "megatron_config": null,
20
+ "megatron_core": "megatron.core",
21
+ "modules_to_save": null,
22
+ "peft_type": "LORA",
23
+ "qalora_group_size": 16,
24
+ "r": 16,
25
+ "rank_pattern": {},
26
+ "revision": null,
27
+ "target_modules": [
28
+ "o_proj",
29
+ "k_proj",
30
+ "q_proj",
31
+ "v_proj"
32
+ ],
33
+ "target_parameters": null,
34
+ "task_type": "CAUSAL_LM",
35
+ "trainable_token_indices": null,
36
+ "use_dora": false,
37
+ "use_qalora": false,
38
+ "use_rslora": false
39
+ }
checkpoint-1368/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5056a27e980949f1bc64404d29bee3b90c63fe651c825806a6862b01b05a0048
3
+ size 54560368
checkpoint-1368/optimizer.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:206e0dba6e5bfb914c7e704a47e24fed07d05fae5115e5d5fb9d8276b5f1f2fc
3
+ size 109267450
checkpoint-1368/rng_state.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:82b3ec935fd0525dfa66fdaad23f4be0d8783ec3a84ffb24ac1d00535ecad933
3
+ size 14244
checkpoint-1368/scheduler.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b66d45c914fbbd2905657951b504f0c5d144b5414fb4b3884a1748fdf976ed2e
3
+ size 1064
checkpoint-1368/special_tokens_map.json ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "additional_special_tokens": [
3
+ "<|eot_id|>",
4
+ "<|eom_id|>"
5
+ ],
6
+ "bos_token": {
7
+ "content": "<|begin_of_text|>",
8
+ "lstrip": false,
9
+ "normalized": false,
10
+ "rstrip": false,
11
+ "single_word": false
12
+ },
13
+ "eos_token": {
14
+ "content": "<|eot_id|>",
15
+ "lstrip": false,
16
+ "normalized": false,
17
+ "rstrip": false,
18
+ "single_word": false
19
+ },
20
+ "pad_token": {
21
+ "content": "<|eot_id|>",
22
+ "lstrip": false,
23
+ "normalized": false,
24
+ "rstrip": false,
25
+ "single_word": false
26
+ }
27
+ }
checkpoint-1368/tokenizer.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
3
+ size 17209920
checkpoint-1368/tokenizer_config.json ADDED
@@ -0,0 +1,2070 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "added_tokens_decoder": {
3
+ "128000": {
4
+ "content": "<|begin_of_text|>",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "128001": {
12
+ "content": "<|end_of_text|>",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "128002": {
20
+ "content": "<|reserved_special_token_0|>",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "128003": {
28
+ "content": "<|reserved_special_token_1|>",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ },
35
+ "128004": {
36
+ "content": "<|finetune_right_pad_id|>",
37
+ "lstrip": false,
38
+ "normalized": false,
39
+ "rstrip": false,
40
+ "single_word": false,
41
+ "special": true
42
+ },
43
+ "128005": {
44
+ "content": "<|reserved_special_token_2|>",
45
+ "lstrip": false,
46
+ "normalized": false,
47
+ "rstrip": false,
48
+ "single_word": false,
49
+ "special": true
50
+ },
51
+ "128006": {
52
+ "content": "<|start_header_id|>",
53
+ "lstrip": false,
54
+ "normalized": false,
55
+ "rstrip": false,
56
+ "single_word": false,
57
+ "special": true
58
+ },
59
+ "128007": {
60
+ "content": "<|end_header_id|>",
61
+ "lstrip": false,
62
+ "normalized": false,
63
+ "rstrip": false,
64
+ "single_word": false,
65
+ "special": true
66
+ },
67
+ "128008": {
68
+ "content": "<|eom_id|>",
69
+ "lstrip": false,
70
+ "normalized": false,
71
+ "rstrip": false,
72
+ "single_word": false,
73
+ "special": true
74
+ },
75
+ "128009": {
76
+ "content": "<|eot_id|>",
77
+ "lstrip": false,
78
+ "normalized": false,
79
+ "rstrip": false,
80
+ "single_word": false,
81
+ "special": true
82
+ },
83
+ "128010": {
84
+ "content": "<|python_tag|>",
85
+ "lstrip": false,
86
+ "normalized": false,
87
+ "rstrip": false,
88
+ "single_word": false,
89
+ "special": true
90
+ },
91
+ "128011": {
92
+ "content": "<|reserved_special_token_3|>",
93
+ "lstrip": false,
94
+ "normalized": false,
95
+ "rstrip": false,
96
+ "single_word": false,
97
+ "special": true
98
+ },
99
+ "128012": {
100
+ "content": "<|reserved_special_token_4|>",
101
+ "lstrip": false,
102
+ "normalized": false,
103
+ "rstrip": false,
104
+ "single_word": false,
105
+ "special": true
106
+ },
107
+ "128013": {
108
+ "content": "<|reserved_special_token_5|>",
109
+ "lstrip": false,
110
+ "normalized": false,
111
+ "rstrip": false,
112
+ "single_word": false,
113
+ "special": true
114
+ },
115
+ "128014": {
116
+ "content": "<|reserved_special_token_6|>",
117
+ "lstrip": false,
118
+ "normalized": false,
119
+ "rstrip": false,
120
+ "single_word": false,
121
+ "special": true
122
+ },
123
+ "128015": {
124
+ "content": "<|reserved_special_token_7|>",
125
+ "lstrip": false,
126
+ "normalized": false,
127
+ "rstrip": false,
128
+ "single_word": false,
129
+ "special": true
130
+ },
131
+ "128016": {
132
+ "content": "<|reserved_special_token_8|>",
133
+ "lstrip": false,
134
+ "normalized": false,
135
+ "rstrip": false,
136
+ "single_word": false,
137
+ "special": true
138
+ },
139
+ "128017": {
140
+ "content": "<|reserved_special_token_9|>",
141
+ "lstrip": false,
142
+ "normalized": false,
143
+ "rstrip": false,
144
+ "single_word": false,
145
+ "special": true
146
+ },
147
+ "128018": {
148
+ "content": "<|reserved_special_token_10|>",
149
+ "lstrip": false,
150
+ "normalized": false,
151
+ "rstrip": false,
152
+ "single_word": false,
153
+ "special": true
154
+ },
155
+ "128019": {
156
+ "content": "<|reserved_special_token_11|>",
157
+ "lstrip": false,
158
+ "normalized": false,
159
+ "rstrip": false,
160
+ "single_word": false,
161
+ "special": true
162
+ },
163
+ "128020": {
164
+ "content": "<|reserved_special_token_12|>",
165
+ "lstrip": false,
166
+ "normalized": false,
167
+ "rstrip": false,
168
+ "single_word": false,
169
+ "special": true
170
+ },
171
+ "128021": {
172
+ "content": "<|reserved_special_token_13|>",
173
+ "lstrip": false,
174
+ "normalized": false,
175
+ "rstrip": false,
176
+ "single_word": false,
177
+ "special": true
178
+ },
179
+ "128022": {
180
+ "content": "<|reserved_special_token_14|>",
181
+ "lstrip": false,
182
+ "normalized": false,
183
+ "rstrip": false,
184
+ "single_word": false,
185
+ "special": true
186
+ },
187
+ "128023": {
188
+ "content": "<|reserved_special_token_15|>",
189
+ "lstrip": false,
190
+ "normalized": false,
191
+ "rstrip": false,
192
+ "single_word": false,
193
+ "special": true
194
+ },
195
+ "128024": {
196
+ "content": "<|reserved_special_token_16|>",
197
+ "lstrip": false,
198
+ "normalized": false,
199
+ "rstrip": false,
200
+ "single_word": false,
201
+ "special": true
202
+ },
203
+ "128025": {
204
+ "content": "<|reserved_special_token_17|>",
205
+ "lstrip": false,
206
+ "normalized": false,
207
+ "rstrip": false,
208
+ "single_word": false,
209
+ "special": true
210
+ },
211
+ "128026": {
212
+ "content": "<|reserved_special_token_18|>",
213
+ "lstrip": false,
214
+ "normalized": false,
215
+ "rstrip": false,
216
+ "single_word": false,
217
+ "special": true
218
+ },
219
+ "128027": {
220
+ "content": "<|reserved_special_token_19|>",
221
+ "lstrip": false,
222
+ "normalized": false,
223
+ "rstrip": false,
224
+ "single_word": false,
225
+ "special": true
226
+ },
227
+ "128028": {
228
+ "content": "<|reserved_special_token_20|>",
229
+ "lstrip": false,
230
+ "normalized": false,
231
+ "rstrip": false,
232
+ "single_word": false,
233
+ "special": true
234
+ },
235
+ "128029": {
236
+ "content": "<|reserved_special_token_21|>",
237
+ "lstrip": false,
238
+ "normalized": false,
239
+ "rstrip": false,
240
+ "single_word": false,
241
+ "special": true
242
+ },
243
+ "128030": {
244
+ "content": "<|reserved_special_token_22|>",
245
+ "lstrip": false,
246
+ "normalized": false,
247
+ "rstrip": false,
248
+ "single_word": false,
249
+ "special": true
250
+ },
251
+ "128031": {
252
+ "content": "<|reserved_special_token_23|>",
253
+ "lstrip": false,
254
+ "normalized": false,
255
+ "rstrip": false,
256
+ "single_word": false,
257
+ "special": true
258
+ },
259
+ "128032": {
260
+ "content": "<|reserved_special_token_24|>",
261
+ "lstrip": false,
262
+ "normalized": false,
263
+ "rstrip": false,
264
+ "single_word": false,
265
+ "special": true
266
+ },
267
+ "128033": {
268
+ "content": "<|reserved_special_token_25|>",
269
+ "lstrip": false,
270
+ "normalized": false,
271
+ "rstrip": false,
272
+ "single_word": false,
273
+ "special": true
274
+ },
275
+ "128034": {
276
+ "content": "<|reserved_special_token_26|>",
277
+ "lstrip": false,
278
+ "normalized": false,
279
+ "rstrip": false,
280
+ "single_word": false,
281
+ "special": true
282
+ },
283
+ "128035": {
284
+ "content": "<|reserved_special_token_27|>",
285
+ "lstrip": false,
286
+ "normalized": false,
287
+ "rstrip": false,
288
+ "single_word": false,
289
+ "special": true
290
+ },
291
+ "128036": {
292
+ "content": "<|reserved_special_token_28|>",
293
+ "lstrip": false,
294
+ "normalized": false,
295
+ "rstrip": false,
296
+ "single_word": false,
297
+ "special": true
298
+ },
299
+ "128037": {
300
+ "content": "<|reserved_special_token_29|>",
301
+ "lstrip": false,
302
+ "normalized": false,
303
+ "rstrip": false,
304
+ "single_word": false,
305
+ "special": true
306
+ },
307
+ "128038": {
308
+ "content": "<|reserved_special_token_30|>",
309
+ "lstrip": false,
310
+ "normalized": false,
311
+ "rstrip": false,
312
+ "single_word": false,
313
+ "special": true
314
+ },
315
+ "128039": {
316
+ "content": "<|reserved_special_token_31|>",
317
+ "lstrip": false,
318
+ "normalized": false,
319
+ "rstrip": false,
320
+ "single_word": false,
321
+ "special": true
322
+ },
323
+ "128040": {
324
+ "content": "<|reserved_special_token_32|>",
325
+ "lstrip": false,
326
+ "normalized": false,
327
+ "rstrip": false,
328
+ "single_word": false,
329
+ "special": true
330
+ },
331
+ "128041": {
332
+ "content": "<|reserved_special_token_33|>",
333
+ "lstrip": false,
334
+ "normalized": false,
335
+ "rstrip": false,
336
+ "single_word": false,
337
+ "special": true
338
+ },
339
+ "128042": {
340
+ "content": "<|reserved_special_token_34|>",
341
+ "lstrip": false,
342
+ "normalized": false,
343
+ "rstrip": false,
344
+ "single_word": false,
345
+ "special": true
346
+ },
347
+ "128043": {
348
+ "content": "<|reserved_special_token_35|>",
349
+ "lstrip": false,
350
+ "normalized": false,
351
+ "rstrip": false,
352
+ "single_word": false,
353
+ "special": true
354
+ },
355
+ "128044": {
356
+ "content": "<|reserved_special_token_36|>",
357
+ "lstrip": false,
358
+ "normalized": false,
359
+ "rstrip": false,
360
+ "single_word": false,
361
+ "special": true
362
+ },
363
+ "128045": {
364
+ "content": "<|reserved_special_token_37|>",
365
+ "lstrip": false,
366
+ "normalized": false,
367
+ "rstrip": false,
368
+ "single_word": false,
369
+ "special": true
370
+ },
371
+ "128046": {
372
+ "content": "<|reserved_special_token_38|>",
373
+ "lstrip": false,
374
+ "normalized": false,
375
+ "rstrip": false,
376
+ "single_word": false,
377
+ "special": true
378
+ },
379
+ "128047": {
380
+ "content": "<|reserved_special_token_39|>",
381
+ "lstrip": false,
382
+ "normalized": false,
383
+ "rstrip": false,
384
+ "single_word": false,
385
+ "special": true
386
+ },
387
+ "128048": {
388
+ "content": "<|reserved_special_token_40|>",
389
+ "lstrip": false,
390
+ "normalized": false,
391
+ "rstrip": false,
392
+ "single_word": false,
393
+ "special": true
394
+ },
395
+ "128049": {
396
+ "content": "<|reserved_special_token_41|>",
397
+ "lstrip": false,
398
+ "normalized": false,
399
+ "rstrip": false,
400
+ "single_word": false,
401
+ "special": true
402
+ },
403
+ "128050": {
404
+ "content": "<|reserved_special_token_42|>",
405
+ "lstrip": false,
406
+ "normalized": false,
407
+ "rstrip": false,
408
+ "single_word": false,
409
+ "special": true
410
+ },
411
+ "128051": {
412
+ "content": "<|reserved_special_token_43|>",
413
+ "lstrip": false,
414
+ "normalized": false,
415
+ "rstrip": false,
416
+ "single_word": false,
417
+ "special": true
418
+ },
419
+ "128052": {
420
+ "content": "<|reserved_special_token_44|>",
421
+ "lstrip": false,
422
+ "normalized": false,
423
+ "rstrip": false,
424
+ "single_word": false,
425
+ "special": true
426
+ },
427
+ "128053": {
428
+ "content": "<|reserved_special_token_45|>",
429
+ "lstrip": false,
430
+ "normalized": false,
431
+ "rstrip": false,
432
+ "single_word": false,
433
+ "special": true
434
+ },
435
+ "128054": {
436
+ "content": "<|reserved_special_token_46|>",
437
+ "lstrip": false,
438
+ "normalized": false,
439
+ "rstrip": false,
440
+ "single_word": false,
441
+ "special": true
442
+ },
443
+ "128055": {
444
+ "content": "<|reserved_special_token_47|>",
445
+ "lstrip": false,
446
+ "normalized": false,
447
+ "rstrip": false,
448
+ "single_word": false,
449
+ "special": true
450
+ },
451
+ "128056": {
452
+ "content": "<|reserved_special_token_48|>",
453
+ "lstrip": false,
454
+ "normalized": false,
455
+ "rstrip": false,
456
+ "single_word": false,
457
+ "special": true
458
+ },
459
+ "128057": {
460
+ "content": "<|reserved_special_token_49|>",
461
+ "lstrip": false,
462
+ "normalized": false,
463
+ "rstrip": false,
464
+ "single_word": false,
465
+ "special": true
466
+ },
467
+ "128058": {
468
+ "content": "<|reserved_special_token_50|>",
469
+ "lstrip": false,
470
+ "normalized": false,
471
+ "rstrip": false,
472
+ "single_word": false,
473
+ "special": true
474
+ },
475
+ "128059": {
476
+ "content": "<|reserved_special_token_51|>",
477
+ "lstrip": false,
478
+ "normalized": false,
479
+ "rstrip": false,
480
+ "single_word": false,
481
+ "special": true
482
+ },
483
+ "128060": {
484
+ "content": "<|reserved_special_token_52|>",
485
+ "lstrip": false,
486
+ "normalized": false,
487
+ "rstrip": false,
488
+ "single_word": false,
489
+ "special": true
490
+ },
491
+ "128061": {
492
+ "content": "<|reserved_special_token_53|>",
493
+ "lstrip": false,
494
+ "normalized": false,
495
+ "rstrip": false,
496
+ "single_word": false,
497
+ "special": true
498
+ },
499
+ "128062": {
500
+ "content": "<|reserved_special_token_54|>",
501
+ "lstrip": false,
502
+ "normalized": false,
503
+ "rstrip": false,
504
+ "single_word": false,
505
+ "special": true
506
+ },
507
+ "128063": {
508
+ "content": "<|reserved_special_token_55|>",
509
+ "lstrip": false,
510
+ "normalized": false,
511
+ "rstrip": false,
512
+ "single_word": false,
513
+ "special": true
514
+ },
515
+ "128064": {
516
+ "content": "<|reserved_special_token_56|>",
517
+ "lstrip": false,
518
+ "normalized": false,
519
+ "rstrip": false,
520
+ "single_word": false,
521
+ "special": true
522
+ },
523
+ "128065": {
524
+ "content": "<|reserved_special_token_57|>",
525
+ "lstrip": false,
526
+ "normalized": false,
527
+ "rstrip": false,
528
+ "single_word": false,
529
+ "special": true
530
+ },
531
+ "128066": {
532
+ "content": "<|reserved_special_token_58|>",
533
+ "lstrip": false,
534
+ "normalized": false,
535
+ "rstrip": false,
536
+ "single_word": false,
537
+ "special": true
538
+ },
539
+ "128067": {
540
+ "content": "<|reserved_special_token_59|>",
541
+ "lstrip": false,
542
+ "normalized": false,
543
+ "rstrip": false,
544
+ "single_word": false,
545
+ "special": true
546
+ },
547
+ "128068": {
548
+ "content": "<|reserved_special_token_60|>",
549
+ "lstrip": false,
550
+ "normalized": false,
551
+ "rstrip": false,
552
+ "single_word": false,
553
+ "special": true
554
+ },
555
+ "128069": {
556
+ "content": "<|reserved_special_token_61|>",
557
+ "lstrip": false,
558
+ "normalized": false,
559
+ "rstrip": false,
560
+ "single_word": false,
561
+ "special": true
562
+ },
563
+ "128070": {
564
+ "content": "<|reserved_special_token_62|>",
565
+ "lstrip": false,
566
+ "normalized": false,
567
+ "rstrip": false,
568
+ "single_word": false,
569
+ "special": true
570
+ },
571
+ "128071": {
572
+ "content": "<|reserved_special_token_63|>",
573
+ "lstrip": false,
574
+ "normalized": false,
575
+ "rstrip": false,
576
+ "single_word": false,
577
+ "special": true
578
+ },
579
+ "128072": {
580
+ "content": "<|reserved_special_token_64|>",
581
+ "lstrip": false,
582
+ "normalized": false,
583
+ "rstrip": false,
584
+ "single_word": false,
585
+ "special": true
586
+ },
587
+ "128073": {
588
+ "content": "<|reserved_special_token_65|>",
589
+ "lstrip": false,
590
+ "normalized": false,
591
+ "rstrip": false,
592
+ "single_word": false,
593
+ "special": true
594
+ },
595
+ "128074": {
596
+ "content": "<|reserved_special_token_66|>",
597
+ "lstrip": false,
598
+ "normalized": false,
599
+ "rstrip": false,
600
+ "single_word": false,
601
+ "special": true
602
+ },
603
+ "128075": {
604
+ "content": "<|reserved_special_token_67|>",
605
+ "lstrip": false,
606
+ "normalized": false,
607
+ "rstrip": false,
608
+ "single_word": false,
609
+ "special": true
610
+ },
611
+ "128076": {
612
+ "content": "<|reserved_special_token_68|>",
613
+ "lstrip": false,
614
+ "normalized": false,
615
+ "rstrip": false,
616
+ "single_word": false,
617
+ "special": true
618
+ },
619
+ "128077": {
620
+ "content": "<|reserved_special_token_69|>",
621
+ "lstrip": false,
622
+ "normalized": false,
623
+ "rstrip": false,
624
+ "single_word": false,
625
+ "special": true
626
+ },
627
+ "128078": {
628
+ "content": "<|reserved_special_token_70|>",
629
+ "lstrip": false,
630
+ "normalized": false,
631
+ "rstrip": false,
632
+ "single_word": false,
633
+ "special": true
634
+ },
635
+ "128079": {
636
+ "content": "<|reserved_special_token_71|>",
637
+ "lstrip": false,
638
+ "normalized": false,
639
+ "rstrip": false,
640
+ "single_word": false,
641
+ "special": true
642
+ },
643
+ "128080": {
644
+ "content": "<|reserved_special_token_72|>",
645
+ "lstrip": false,
646
+ "normalized": false,
647
+ "rstrip": false,
648
+ "single_word": false,
649
+ "special": true
650
+ },
651
+ "128081": {
652
+ "content": "<|reserved_special_token_73|>",
653
+ "lstrip": false,
654
+ "normalized": false,
655
+ "rstrip": false,
656
+ "single_word": false,
657
+ "special": true
658
+ },
659
+ "128082": {
660
+ "content": "<|reserved_special_token_74|>",
661
+ "lstrip": false,
662
+ "normalized": false,
663
+ "rstrip": false,
664
+ "single_word": false,
665
+ "special": true
666
+ },
667
+ "128083": {
668
+ "content": "<|reserved_special_token_75|>",
669
+ "lstrip": false,
670
+ "normalized": false,
671
+ "rstrip": false,
672
+ "single_word": false,
673
+ "special": true
674
+ },
675
+ "128084": {
676
+ "content": "<|reserved_special_token_76|>",
677
+ "lstrip": false,
678
+ "normalized": false,
679
+ "rstrip": false,
680
+ "single_word": false,
681
+ "special": true
682
+ },
683
+ "128085": {
684
+ "content": "<|reserved_special_token_77|>",
685
+ "lstrip": false,
686
+ "normalized": false,
687
+ "rstrip": false,
688
+ "single_word": false,
689
+ "special": true
690
+ },
691
+ "128086": {
692
+ "content": "<|reserved_special_token_78|>",
693
+ "lstrip": false,
694
+ "normalized": false,
695
+ "rstrip": false,
696
+ "single_word": false,
697
+ "special": true
698
+ },
699
+ "128087": {
700
+ "content": "<|reserved_special_token_79|>",
701
+ "lstrip": false,
702
+ "normalized": false,
703
+ "rstrip": false,
704
+ "single_word": false,
705
+ "special": true
706
+ },
707
+ "128088": {
708
+ "content": "<|reserved_special_token_80|>",
709
+ "lstrip": false,
710
+ "normalized": false,
711
+ "rstrip": false,
712
+ "single_word": false,
713
+ "special": true
714
+ },
715
+ "128089": {
716
+ "content": "<|reserved_special_token_81|>",
717
+ "lstrip": false,
718
+ "normalized": false,
719
+ "rstrip": false,
720
+ "single_word": false,
721
+ "special": true
722
+ },
723
+ "128090": {
724
+ "content": "<|reserved_special_token_82|>",
725
+ "lstrip": false,
726
+ "normalized": false,
727
+ "rstrip": false,
728
+ "single_word": false,
729
+ "special": true
730
+ },
731
+ "128091": {
732
+ "content": "<|reserved_special_token_83|>",
733
+ "lstrip": false,
734
+ "normalized": false,
735
+ "rstrip": false,
736
+ "single_word": false,
737
+ "special": true
738
+ },
739
+ "128092": {
740
+ "content": "<|reserved_special_token_84|>",
741
+ "lstrip": false,
742
+ "normalized": false,
743
+ "rstrip": false,
744
+ "single_word": false,
745
+ "special": true
746
+ },
747
+ "128093": {
748
+ "content": "<|reserved_special_token_85|>",
749
+ "lstrip": false,
750
+ "normalized": false,
751
+ "rstrip": false,
752
+ "single_word": false,
753
+ "special": true
754
+ },
755
+ "128094": {
756
+ "content": "<|reserved_special_token_86|>",
757
+ "lstrip": false,
758
+ "normalized": false,
759
+ "rstrip": false,
760
+ "single_word": false,
761
+ "special": true
762
+ },
763
+ "128095": {
764
+ "content": "<|reserved_special_token_87|>",
765
+ "lstrip": false,
766
+ "normalized": false,
767
+ "rstrip": false,
768
+ "single_word": false,
769
+ "special": true
770
+ },
771
+ "128096": {
772
+ "content": "<|reserved_special_token_88|>",
773
+ "lstrip": false,
774
+ "normalized": false,
775
+ "rstrip": false,
776
+ "single_word": false,
777
+ "special": true
778
+ },
779
+ "128097": {
780
+ "content": "<|reserved_special_token_89|>",
781
+ "lstrip": false,
782
+ "normalized": false,
783
+ "rstrip": false,
784
+ "single_word": false,
785
+ "special": true
786
+ },
787
+ "128098": {
788
+ "content": "<|reserved_special_token_90|>",
789
+ "lstrip": false,
790
+ "normalized": false,
791
+ "rstrip": false,
792
+ "single_word": false,
793
+ "special": true
794
+ },
795
+ "128099": {
796
+ "content": "<|reserved_special_token_91|>",
797
+ "lstrip": false,
798
+ "normalized": false,
799
+ "rstrip": false,
800
+ "single_word": false,
801
+ "special": true
802
+ },
803
+ "128100": {
804
+ "content": "<|reserved_special_token_92|>",
805
+ "lstrip": false,
806
+ "normalized": false,
807
+ "rstrip": false,
808
+ "single_word": false,
809
+ "special": true
810
+ },
811
+ "128101": {
812
+ "content": "<|reserved_special_token_93|>",
813
+ "lstrip": false,
814
+ "normalized": false,
815
+ "rstrip": false,
816
+ "single_word": false,
817
+ "special": true
818
+ },
819
+ "128102": {
820
+ "content": "<|reserved_special_token_94|>",
821
+ "lstrip": false,
822
+ "normalized": false,
823
+ "rstrip": false,
824
+ "single_word": false,
825
+ "special": true
826
+ },
827
+ "128103": {
828
+ "content": "<|reserved_special_token_95|>",
829
+ "lstrip": false,
830
+ "normalized": false,
831
+ "rstrip": false,
832
+ "single_word": false,
833
+ "special": true
834
+ },
835
+ "128104": {
836
+ "content": "<|reserved_special_token_96|>",
837
+ "lstrip": false,
838
+ "normalized": false,
839
+ "rstrip": false,
840
+ "single_word": false,
841
+ "special": true
842
+ },
843
+ "128105": {
844
+ "content": "<|reserved_special_token_97|>",
845
+ "lstrip": false,
846
+ "normalized": false,
847
+ "rstrip": false,
848
+ "single_word": false,
849
+ "special": true
850
+ },
851
+ "128106": {
852
+ "content": "<|reserved_special_token_98|>",
853
+ "lstrip": false,
854
+ "normalized": false,
855
+ "rstrip": false,
856
+ "single_word": false,
857
+ "special": true
858
+ },
859
+ "128107": {
860
+ "content": "<|reserved_special_token_99|>",
861
+ "lstrip": false,
862
+ "normalized": false,
863
+ "rstrip": false,
864
+ "single_word": false,
865
+ "special": true
866
+ },
867
+ "128108": {
868
+ "content": "<|reserved_special_token_100|>",
869
+ "lstrip": false,
870
+ "normalized": false,
871
+ "rstrip": false,
872
+ "single_word": false,
873
+ "special": true
874
+ },
875
+ "128109": {
876
+ "content": "<|reserved_special_token_101|>",
877
+ "lstrip": false,
878
+ "normalized": false,
879
+ "rstrip": false,
880
+ "single_word": false,
881
+ "special": true
882
+ },
883
+ "128110": {
884
+ "content": "<|reserved_special_token_102|>",
885
+ "lstrip": false,
886
+ "normalized": false,
887
+ "rstrip": false,
888
+ "single_word": false,
889
+ "special": true
890
+ },
891
+ "128111": {
892
+ "content": "<|reserved_special_token_103|>",
893
+ "lstrip": false,
894
+ "normalized": false,
895
+ "rstrip": false,
896
+ "single_word": false,
897
+ "special": true
898
+ },
899
+ "128112": {
900
+ "content": "<|reserved_special_token_104|>",
901
+ "lstrip": false,
902
+ "normalized": false,
903
+ "rstrip": false,
904
+ "single_word": false,
905
+ "special": true
906
+ },
907
+ "128113": {
908
+ "content": "<|reserved_special_token_105|>",
909
+ "lstrip": false,
910
+ "normalized": false,
911
+ "rstrip": false,
912
+ "single_word": false,
913
+ "special": true
914
+ },
915
+ "128114": {
916
+ "content": "<|reserved_special_token_106|>",
917
+ "lstrip": false,
918
+ "normalized": false,
919
+ "rstrip": false,
920
+ "single_word": false,
921
+ "special": true
922
+ },
923
+ "128115": {
924
+ "content": "<|reserved_special_token_107|>",
925
+ "lstrip": false,
926
+ "normalized": false,
927
+ "rstrip": false,
928
+ "single_word": false,
929
+ "special": true
930
+ },
931
+ "128116": {
932
+ "content": "<|reserved_special_token_108|>",
933
+ "lstrip": false,
934
+ "normalized": false,
935
+ "rstrip": false,
936
+ "single_word": false,
937
+ "special": true
938
+ },
939
+ "128117": {
940
+ "content": "<|reserved_special_token_109|>",
941
+ "lstrip": false,
942
+ "normalized": false,
943
+ "rstrip": false,
944
+ "single_word": false,
945
+ "special": true
946
+ },
947
+ "128118": {
948
+ "content": "<|reserved_special_token_110|>",
949
+ "lstrip": false,
950
+ "normalized": false,
951
+ "rstrip": false,
952
+ "single_word": false,
953
+ "special": true
954
+ },
955
+ "128119": {
956
+ "content": "<|reserved_special_token_111|>",
957
+ "lstrip": false,
958
+ "normalized": false,
959
+ "rstrip": false,
960
+ "single_word": false,
961
+ "special": true
962
+ },
963
+ "128120": {
964
+ "content": "<|reserved_special_token_112|>",
965
+ "lstrip": false,
966
+ "normalized": false,
967
+ "rstrip": false,
968
+ "single_word": false,
969
+ "special": true
970
+ },
971
+ "128121": {
972
+ "content": "<|reserved_special_token_113|>",
973
+ "lstrip": false,
974
+ "normalized": false,
975
+ "rstrip": false,
976
+ "single_word": false,
977
+ "special": true
978
+ },
979
+ "128122": {
980
+ "content": "<|reserved_special_token_114|>",
981
+ "lstrip": false,
982
+ "normalized": false,
983
+ "rstrip": false,
984
+ "single_word": false,
985
+ "special": true
986
+ },
987
+ "128123": {
988
+ "content": "<|reserved_special_token_115|>",
989
+ "lstrip": false,
990
+ "normalized": false,
991
+ "rstrip": false,
992
+ "single_word": false,
993
+ "special": true
994
+ },
995
+ "128124": {
996
+ "content": "<|reserved_special_token_116|>",
997
+ "lstrip": false,
998
+ "normalized": false,
999
+ "rstrip": false,
1000
+ "single_word": false,
1001
+ "special": true
1002
+ },
1003
+ "128125": {
1004
+ "content": "<|reserved_special_token_117|>",
1005
+ "lstrip": false,
1006
+ "normalized": false,
1007
+ "rstrip": false,
1008
+ "single_word": false,
1009
+ "special": true
1010
+ },
1011
+ "128126": {
1012
+ "content": "<|reserved_special_token_118|>",
1013
+ "lstrip": false,
1014
+ "normalized": false,
1015
+ "rstrip": false,
1016
+ "single_word": false,
1017
+ "special": true
1018
+ },
1019
+ "128127": {
1020
+ "content": "<|reserved_special_token_119|>",
1021
+ "lstrip": false,
1022
+ "normalized": false,
1023
+ "rstrip": false,
1024
+ "single_word": false,
1025
+ "special": true
1026
+ },
1027
+ "128128": {
1028
+ "content": "<|reserved_special_token_120|>",
1029
+ "lstrip": false,
1030
+ "normalized": false,
1031
+ "rstrip": false,
1032
+ "single_word": false,
1033
+ "special": true
1034
+ },
1035
+ "128129": {
1036
+ "content": "<|reserved_special_token_121|>",
1037
+ "lstrip": false,
1038
+ "normalized": false,
1039
+ "rstrip": false,
1040
+ "single_word": false,
1041
+ "special": true
1042
+ },
1043
+ "128130": {
1044
+ "content": "<|reserved_special_token_122|>",
1045
+ "lstrip": false,
1046
+ "normalized": false,
1047
+ "rstrip": false,
1048
+ "single_word": false,
1049
+ "special": true
1050
+ },
1051
+ "128131": {
1052
+ "content": "<|reserved_special_token_123|>",
1053
+ "lstrip": false,
1054
+ "normalized": false,
1055
+ "rstrip": false,
1056
+ "single_word": false,
1057
+ "special": true
1058
+ },
1059
+ "128132": {
1060
+ "content": "<|reserved_special_token_124|>",
1061
+ "lstrip": false,
1062
+ "normalized": false,
1063
+ "rstrip": false,
1064
+ "single_word": false,
1065
+ "special": true
1066
+ },
1067
+ "128133": {
1068
+ "content": "<|reserved_special_token_125|>",
1069
+ "lstrip": false,
1070
+ "normalized": false,
1071
+ "rstrip": false,
1072
+ "single_word": false,
1073
+ "special": true
1074
+ },
1075
+ "128134": {
1076
+ "content": "<|reserved_special_token_126|>",
1077
+ "lstrip": false,
1078
+ "normalized": false,
1079
+ "rstrip": false,
1080
+ "single_word": false,
1081
+ "special": true
1082
+ },
1083
+ "128135": {
1084
+ "content": "<|reserved_special_token_127|>",
1085
+ "lstrip": false,
1086
+ "normalized": false,
1087
+ "rstrip": false,
1088
+ "single_word": false,
1089
+ "special": true
1090
+ },
1091
+ "128136": {
1092
+ "content": "<|reserved_special_token_128|>",
1093
+ "lstrip": false,
1094
+ "normalized": false,
1095
+ "rstrip": false,
1096
+ "single_word": false,
1097
+ "special": true
1098
+ },
1099
+ "128137": {
1100
+ "content": "<|reserved_special_token_129|>",
1101
+ "lstrip": false,
1102
+ "normalized": false,
1103
+ "rstrip": false,
1104
+ "single_word": false,
1105
+ "special": true
1106
+ },
1107
+ "128138": {
1108
+ "content": "<|reserved_special_token_130|>",
1109
+ "lstrip": false,
1110
+ "normalized": false,
1111
+ "rstrip": false,
1112
+ "single_word": false,
1113
+ "special": true
1114
+ },
1115
+ "128139": {
1116
+ "content": "<|reserved_special_token_131|>",
1117
+ "lstrip": false,
1118
+ "normalized": false,
1119
+ "rstrip": false,
1120
+ "single_word": false,
1121
+ "special": true
1122
+ },
1123
+ "128140": {
1124
+ "content": "<|reserved_special_token_132|>",
1125
+ "lstrip": false,
1126
+ "normalized": false,
1127
+ "rstrip": false,
1128
+ "single_word": false,
1129
+ "special": true
1130
+ },
1131
+ "128141": {
1132
+ "content": "<|reserved_special_token_133|>",
1133
+ "lstrip": false,
1134
+ "normalized": false,
1135
+ "rstrip": false,
1136
+ "single_word": false,
1137
+ "special": true
1138
+ },
1139
+ "128142": {
1140
+ "content": "<|reserved_special_token_134|>",
1141
+ "lstrip": false,
1142
+ "normalized": false,
1143
+ "rstrip": false,
1144
+ "single_word": false,
1145
+ "special": true
1146
+ },
1147
+ "128143": {
1148
+ "content": "<|reserved_special_token_135|>",
1149
+ "lstrip": false,
1150
+ "normalized": false,
1151
+ "rstrip": false,
1152
+ "single_word": false,
1153
+ "special": true
1154
+ },
1155
+ "128144": {
1156
+ "content": "<|reserved_special_token_136|>",
1157
+ "lstrip": false,
1158
+ "normalized": false,
1159
+ "rstrip": false,
1160
+ "single_word": false,
1161
+ "special": true
1162
+ },
1163
+ "128145": {
1164
+ "content": "<|reserved_special_token_137|>",
1165
+ "lstrip": false,
1166
+ "normalized": false,
1167
+ "rstrip": false,
1168
+ "single_word": false,
1169
+ "special": true
1170
+ },
1171
+ "128146": {
1172
+ "content": "<|reserved_special_token_138|>",
1173
+ "lstrip": false,
1174
+ "normalized": false,
1175
+ "rstrip": false,
1176
+ "single_word": false,
1177
+ "special": true
1178
+ },
1179
+ "128147": {
1180
+ "content": "<|reserved_special_token_139|>",
1181
+ "lstrip": false,
1182
+ "normalized": false,
1183
+ "rstrip": false,
1184
+ "single_word": false,
1185
+ "special": true
1186
+ },
1187
+ "128148": {
1188
+ "content": "<|reserved_special_token_140|>",
1189
+ "lstrip": false,
1190
+ "normalized": false,
1191
+ "rstrip": false,
1192
+ "single_word": false,
1193
+ "special": true
1194
+ },
1195
+ "128149": {
1196
+ "content": "<|reserved_special_token_141|>",
1197
+ "lstrip": false,
1198
+ "normalized": false,
1199
+ "rstrip": false,
1200
+ "single_word": false,
1201
+ "special": true
1202
+ },
1203
+ "128150": {
1204
+ "content": "<|reserved_special_token_142|>",
1205
+ "lstrip": false,
1206
+ "normalized": false,
1207
+ "rstrip": false,
1208
+ "single_word": false,
1209
+ "special": true
1210
+ },
1211
+ "128151": {
1212
+ "content": "<|reserved_special_token_143|>",
1213
+ "lstrip": false,
1214
+ "normalized": false,
1215
+ "rstrip": false,
1216
+ "single_word": false,
1217
+ "special": true
1218
+ },
1219
+ "128152": {
1220
+ "content": "<|reserved_special_token_144|>",
1221
+ "lstrip": false,
1222
+ "normalized": false,
1223
+ "rstrip": false,
1224
+ "single_word": false,
1225
+ "special": true
1226
+ },
1227
+ "128153": {
1228
+ "content": "<|reserved_special_token_145|>",
1229
+ "lstrip": false,
1230
+ "normalized": false,
1231
+ "rstrip": false,
1232
+ "single_word": false,
1233
+ "special": true
1234
+ },
1235
+ "128154": {
1236
+ "content": "<|reserved_special_token_146|>",
1237
+ "lstrip": false,
1238
+ "normalized": false,
1239
+ "rstrip": false,
1240
+ "single_word": false,
1241
+ "special": true
1242
+ },
1243
+ "128155": {
1244
+ "content": "<|reserved_special_token_147|>",
1245
+ "lstrip": false,
1246
+ "normalized": false,
1247
+ "rstrip": false,
1248
+ "single_word": false,
1249
+ "special": true
1250
+ },
1251
+ "128156": {
1252
+ "content": "<|reserved_special_token_148|>",
1253
+ "lstrip": false,
1254
+ "normalized": false,
1255
+ "rstrip": false,
1256
+ "single_word": false,
1257
+ "special": true
1258
+ },
1259
+ "128157": {
1260
+ "content": "<|reserved_special_token_149|>",
1261
+ "lstrip": false,
1262
+ "normalized": false,
1263
+ "rstrip": false,
1264
+ "single_word": false,
1265
+ "special": true
1266
+ },
1267
+ "128158": {
1268
+ "content": "<|reserved_special_token_150|>",
1269
+ "lstrip": false,
1270
+ "normalized": false,
1271
+ "rstrip": false,
1272
+ "single_word": false,
1273
+ "special": true
1274
+ },
1275
+ "128159": {
1276
+ "content": "<|reserved_special_token_151|>",
1277
+ "lstrip": false,
1278
+ "normalized": false,
1279
+ "rstrip": false,
1280
+ "single_word": false,
1281
+ "special": true
1282
+ },
1283
+ "128160": {
1284
+ "content": "<|reserved_special_token_152|>",
1285
+ "lstrip": false,
1286
+ "normalized": false,
1287
+ "rstrip": false,
1288
+ "single_word": false,
1289
+ "special": true
1290
+ },
1291
+ "128161": {
1292
+ "content": "<|reserved_special_token_153|>",
1293
+ "lstrip": false,
1294
+ "normalized": false,
1295
+ "rstrip": false,
1296
+ "single_word": false,
1297
+ "special": true
1298
+ },
1299
+ "128162": {
1300
+ "content": "<|reserved_special_token_154|>",
1301
+ "lstrip": false,
1302
+ "normalized": false,
1303
+ "rstrip": false,
1304
+ "single_word": false,
1305
+ "special": true
1306
+ },
1307
+ "128163": {
1308
+ "content": "<|reserved_special_token_155|>",
1309
+ "lstrip": false,
1310
+ "normalized": false,
1311
+ "rstrip": false,
1312
+ "single_word": false,
1313
+ "special": true
1314
+ },
1315
+ "128164": {
1316
+ "content": "<|reserved_special_token_156|>",
1317
+ "lstrip": false,
1318
+ "normalized": false,
1319
+ "rstrip": false,
1320
+ "single_word": false,
1321
+ "special": true
1322
+ },
1323
+ "128165": {
1324
+ "content": "<|reserved_special_token_157|>",
1325
+ "lstrip": false,
1326
+ "normalized": false,
1327
+ "rstrip": false,
1328
+ "single_word": false,
1329
+ "special": true
1330
+ },
1331
+ "128166": {
1332
+ "content": "<|reserved_special_token_158|>",
1333
+ "lstrip": false,
1334
+ "normalized": false,
1335
+ "rstrip": false,
1336
+ "single_word": false,
1337
+ "special": true
1338
+ },
1339
+ "128167": {
1340
+ "content": "<|reserved_special_token_159|>",
1341
+ "lstrip": false,
1342
+ "normalized": false,
1343
+ "rstrip": false,
1344
+ "single_word": false,
1345
+ "special": true
1346
+ },
1347
+ "128168": {
1348
+ "content": "<|reserved_special_token_160|>",
1349
+ "lstrip": false,
1350
+ "normalized": false,
1351
+ "rstrip": false,
1352
+ "single_word": false,
1353
+ "special": true
1354
+ },
1355
+ "128169": {
1356
+ "content": "<|reserved_special_token_161|>",
1357
+ "lstrip": false,
1358
+ "normalized": false,
1359
+ "rstrip": false,
1360
+ "single_word": false,
1361
+ "special": true
1362
+ },
1363
+ "128170": {
1364
+ "content": "<|reserved_special_token_162|>",
1365
+ "lstrip": false,
1366
+ "normalized": false,
1367
+ "rstrip": false,
1368
+ "single_word": false,
1369
+ "special": true
1370
+ },
1371
+ "128171": {
1372
+ "content": "<|reserved_special_token_163|>",
1373
+ "lstrip": false,
1374
+ "normalized": false,
1375
+ "rstrip": false,
1376
+ "single_word": false,
1377
+ "special": true
1378
+ },
1379
+ "128172": {
1380
+ "content": "<|reserved_special_token_164|>",
1381
+ "lstrip": false,
1382
+ "normalized": false,
1383
+ "rstrip": false,
1384
+ "single_word": false,
1385
+ "special": true
1386
+ },
1387
+ "128173": {
1388
+ "content": "<|reserved_special_token_165|>",
1389
+ "lstrip": false,
1390
+ "normalized": false,
1391
+ "rstrip": false,
1392
+ "single_word": false,
1393
+ "special": true
1394
+ },
1395
+ "128174": {
1396
+ "content": "<|reserved_special_token_166|>",
1397
+ "lstrip": false,
1398
+ "normalized": false,
1399
+ "rstrip": false,
1400
+ "single_word": false,
1401
+ "special": true
1402
+ },
1403
+ "128175": {
1404
+ "content": "<|reserved_special_token_167|>",
1405
+ "lstrip": false,
1406
+ "normalized": false,
1407
+ "rstrip": false,
1408
+ "single_word": false,
1409
+ "special": true
1410
+ },
1411
+ "128176": {
1412
+ "content": "<|reserved_special_token_168|>",
1413
+ "lstrip": false,
1414
+ "normalized": false,
1415
+ "rstrip": false,
1416
+ "single_word": false,
1417
+ "special": true
1418
+ },
1419
+ "128177": {
1420
+ "content": "<|reserved_special_token_169|>",
1421
+ "lstrip": false,
1422
+ "normalized": false,
1423
+ "rstrip": false,
1424
+ "single_word": false,
1425
+ "special": true
1426
+ },
1427
+ "128178": {
1428
+ "content": "<|reserved_special_token_170|>",
1429
+ "lstrip": false,
1430
+ "normalized": false,
1431
+ "rstrip": false,
1432
+ "single_word": false,
1433
+ "special": true
1434
+ },
1435
+ "128179": {
1436
+ "content": "<|reserved_special_token_171|>",
1437
+ "lstrip": false,
1438
+ "normalized": false,
1439
+ "rstrip": false,
1440
+ "single_word": false,
1441
+ "special": true
1442
+ },
1443
+ "128180": {
1444
+ "content": "<|reserved_special_token_172|>",
1445
+ "lstrip": false,
1446
+ "normalized": false,
1447
+ "rstrip": false,
1448
+ "single_word": false,
1449
+ "special": true
1450
+ },
1451
+ "128181": {
1452
+ "content": "<|reserved_special_token_173|>",
1453
+ "lstrip": false,
1454
+ "normalized": false,
1455
+ "rstrip": false,
1456
+ "single_word": false,
1457
+ "special": true
1458
+ },
1459
+ "128182": {
1460
+ "content": "<|reserved_special_token_174|>",
1461
+ "lstrip": false,
1462
+ "normalized": false,
1463
+ "rstrip": false,
1464
+ "single_word": false,
1465
+ "special": true
1466
+ },
1467
+ "128183": {
1468
+ "content": "<|reserved_special_token_175|>",
1469
+ "lstrip": false,
1470
+ "normalized": false,
1471
+ "rstrip": false,
1472
+ "single_word": false,
1473
+ "special": true
1474
+ },
1475
+ "128184": {
1476
+ "content": "<|reserved_special_token_176|>",
1477
+ "lstrip": false,
1478
+ "normalized": false,
1479
+ "rstrip": false,
1480
+ "single_word": false,
1481
+ "special": true
1482
+ },
1483
+ "128185": {
1484
+ "content": "<|reserved_special_token_177|>",
1485
+ "lstrip": false,
1486
+ "normalized": false,
1487
+ "rstrip": false,
1488
+ "single_word": false,
1489
+ "special": true
1490
+ },
1491
+ "128186": {
1492
+ "content": "<|reserved_special_token_178|>",
1493
+ "lstrip": false,
1494
+ "normalized": false,
1495
+ "rstrip": false,
1496
+ "single_word": false,
1497
+ "special": true
1498
+ },
1499
+ "128187": {
1500
+ "content": "<|reserved_special_token_179|>",
1501
+ "lstrip": false,
1502
+ "normalized": false,
1503
+ "rstrip": false,
1504
+ "single_word": false,
1505
+ "special": true
1506
+ },
1507
+ "128188": {
1508
+ "content": "<|reserved_special_token_180|>",
1509
+ "lstrip": false,
1510
+ "normalized": false,
1511
+ "rstrip": false,
1512
+ "single_word": false,
1513
+ "special": true
1514
+ },
1515
+ "128189": {
1516
+ "content": "<|reserved_special_token_181|>",
1517
+ "lstrip": false,
1518
+ "normalized": false,
1519
+ "rstrip": false,
1520
+ "single_word": false,
1521
+ "special": true
1522
+ },
1523
+ "128190": {
1524
+ "content": "<|reserved_special_token_182|>",
1525
+ "lstrip": false,
1526
+ "normalized": false,
1527
+ "rstrip": false,
1528
+ "single_word": false,
1529
+ "special": true
1530
+ },
1531
+ "128191": {
1532
+ "content": "<|reserved_special_token_183|>",
1533
+ "lstrip": false,
1534
+ "normalized": false,
1535
+ "rstrip": false,
1536
+ "single_word": false,
1537
+ "special": true
1538
+ },
1539
+ "128192": {
1540
+ "content": "<|reserved_special_token_184|>",
1541
+ "lstrip": false,
1542
+ "normalized": false,
1543
+ "rstrip": false,
1544
+ "single_word": false,
1545
+ "special": true
1546
+ },
1547
+ "128193": {
1548
+ "content": "<|reserved_special_token_185|>",
1549
+ "lstrip": false,
1550
+ "normalized": false,
1551
+ "rstrip": false,
1552
+ "single_word": false,
1553
+ "special": true
1554
+ },
1555
+ "128194": {
1556
+ "content": "<|reserved_special_token_186|>",
1557
+ "lstrip": false,
1558
+ "normalized": false,
1559
+ "rstrip": false,
1560
+ "single_word": false,
1561
+ "special": true
1562
+ },
1563
+ "128195": {
1564
+ "content": "<|reserved_special_token_187|>",
1565
+ "lstrip": false,
1566
+ "normalized": false,
1567
+ "rstrip": false,
1568
+ "single_word": false,
1569
+ "special": true
1570
+ },
1571
+ "128196": {
1572
+ "content": "<|reserved_special_token_188|>",
1573
+ "lstrip": false,
1574
+ "normalized": false,
1575
+ "rstrip": false,
1576
+ "single_word": false,
1577
+ "special": true
1578
+ },
1579
+ "128197": {
1580
+ "content": "<|reserved_special_token_189|>",
1581
+ "lstrip": false,
1582
+ "normalized": false,
1583
+ "rstrip": false,
1584
+ "single_word": false,
1585
+ "special": true
1586
+ },
1587
+ "128198": {
1588
+ "content": "<|reserved_special_token_190|>",
1589
+ "lstrip": false,
1590
+ "normalized": false,
1591
+ "rstrip": false,
1592
+ "single_word": false,
1593
+ "special": true
1594
+ },
1595
+ "128199": {
1596
+ "content": "<|reserved_special_token_191|>",
1597
+ "lstrip": false,
1598
+ "normalized": false,
1599
+ "rstrip": false,
1600
+ "single_word": false,
1601
+ "special": true
1602
+ },
1603
+ "128200": {
1604
+ "content": "<|reserved_special_token_192|>",
1605
+ "lstrip": false,
1606
+ "normalized": false,
1607
+ "rstrip": false,
1608
+ "single_word": false,
1609
+ "special": true
1610
+ },
1611
+ "128201": {
1612
+ "content": "<|reserved_special_token_193|>",
1613
+ "lstrip": false,
1614
+ "normalized": false,
1615
+ "rstrip": false,
1616
+ "single_word": false,
1617
+ "special": true
1618
+ },
1619
+ "128202": {
1620
+ "content": "<|reserved_special_token_194|>",
1621
+ "lstrip": false,
1622
+ "normalized": false,
1623
+ "rstrip": false,
1624
+ "single_word": false,
1625
+ "special": true
1626
+ },
1627
+ "128203": {
1628
+ "content": "<|reserved_special_token_195|>",
1629
+ "lstrip": false,
1630
+ "normalized": false,
1631
+ "rstrip": false,
1632
+ "single_word": false,
1633
+ "special": true
1634
+ },
1635
+ "128204": {
1636
+ "content": "<|reserved_special_token_196|>",
1637
+ "lstrip": false,
1638
+ "normalized": false,
1639
+ "rstrip": false,
1640
+ "single_word": false,
1641
+ "special": true
1642
+ },
1643
+ "128205": {
1644
+ "content": "<|reserved_special_token_197|>",
1645
+ "lstrip": false,
1646
+ "normalized": false,
1647
+ "rstrip": false,
1648
+ "single_word": false,
1649
+ "special": true
1650
+ },
1651
+ "128206": {
1652
+ "content": "<|reserved_special_token_198|>",
1653
+ "lstrip": false,
1654
+ "normalized": false,
1655
+ "rstrip": false,
1656
+ "single_word": false,
1657
+ "special": true
1658
+ },
1659
+ "128207": {
1660
+ "content": "<|reserved_special_token_199|>",
1661
+ "lstrip": false,
1662
+ "normalized": false,
1663
+ "rstrip": false,
1664
+ "single_word": false,
1665
+ "special": true
1666
+ },
1667
+ "128208": {
1668
+ "content": "<|reserved_special_token_200|>",
1669
+ "lstrip": false,
1670
+ "normalized": false,
1671
+ "rstrip": false,
1672
+ "single_word": false,
1673
+ "special": true
1674
+ },
1675
+ "128209": {
1676
+ "content": "<|reserved_special_token_201|>",
1677
+ "lstrip": false,
1678
+ "normalized": false,
1679
+ "rstrip": false,
1680
+ "single_word": false,
1681
+ "special": true
1682
+ },
1683
+ "128210": {
1684
+ "content": "<|reserved_special_token_202|>",
1685
+ "lstrip": false,
1686
+ "normalized": false,
1687
+ "rstrip": false,
1688
+ "single_word": false,
1689
+ "special": true
1690
+ },
1691
+ "128211": {
1692
+ "content": "<|reserved_special_token_203|>",
1693
+ "lstrip": false,
1694
+ "normalized": false,
1695
+ "rstrip": false,
1696
+ "single_word": false,
1697
+ "special": true
1698
+ },
1699
+ "128212": {
1700
+ "content": "<|reserved_special_token_204|>",
1701
+ "lstrip": false,
1702
+ "normalized": false,
1703
+ "rstrip": false,
1704
+ "single_word": false,
1705
+ "special": true
1706
+ },
1707
+ "128213": {
1708
+ "content": "<|reserved_special_token_205|>",
1709
+ "lstrip": false,
1710
+ "normalized": false,
1711
+ "rstrip": false,
1712
+ "single_word": false,
1713
+ "special": true
1714
+ },
1715
+ "128214": {
1716
+ "content": "<|reserved_special_token_206|>",
1717
+ "lstrip": false,
1718
+ "normalized": false,
1719
+ "rstrip": false,
1720
+ "single_word": false,
1721
+ "special": true
1722
+ },
1723
+ "128215": {
1724
+ "content": "<|reserved_special_token_207|>",
1725
+ "lstrip": false,
1726
+ "normalized": false,
1727
+ "rstrip": false,
1728
+ "single_word": false,
1729
+ "special": true
1730
+ },
1731
+ "128216": {
1732
+ "content": "<|reserved_special_token_208|>",
1733
+ "lstrip": false,
1734
+ "normalized": false,
1735
+ "rstrip": false,
1736
+ "single_word": false,
1737
+ "special": true
1738
+ },
1739
+ "128217": {
1740
+ "content": "<|reserved_special_token_209|>",
1741
+ "lstrip": false,
1742
+ "normalized": false,
1743
+ "rstrip": false,
1744
+ "single_word": false,
1745
+ "special": true
1746
+ },
1747
+ "128218": {
1748
+ "content": "<|reserved_special_token_210|>",
1749
+ "lstrip": false,
1750
+ "normalized": false,
1751
+ "rstrip": false,
1752
+ "single_word": false,
1753
+ "special": true
1754
+ },
1755
+ "128219": {
1756
+ "content": "<|reserved_special_token_211|>",
1757
+ "lstrip": false,
1758
+ "normalized": false,
1759
+ "rstrip": false,
1760
+ "single_word": false,
1761
+ "special": true
1762
+ },
1763
+ "128220": {
1764
+ "content": "<|reserved_special_token_212|>",
1765
+ "lstrip": false,
1766
+ "normalized": false,
1767
+ "rstrip": false,
1768
+ "single_word": false,
1769
+ "special": true
1770
+ },
1771
+ "128221": {
1772
+ "content": "<|reserved_special_token_213|>",
1773
+ "lstrip": false,
1774
+ "normalized": false,
1775
+ "rstrip": false,
1776
+ "single_word": false,
1777
+ "special": true
1778
+ },
1779
+ "128222": {
1780
+ "content": "<|reserved_special_token_214|>",
1781
+ "lstrip": false,
1782
+ "normalized": false,
1783
+ "rstrip": false,
1784
+ "single_word": false,
1785
+ "special": true
1786
+ },
1787
+ "128223": {
1788
+ "content": "<|reserved_special_token_215|>",
1789
+ "lstrip": false,
1790
+ "normalized": false,
1791
+ "rstrip": false,
1792
+ "single_word": false,
1793
+ "special": true
1794
+ },
1795
+ "128224": {
1796
+ "content": "<|reserved_special_token_216|>",
1797
+ "lstrip": false,
1798
+ "normalized": false,
1799
+ "rstrip": false,
1800
+ "single_word": false,
1801
+ "special": true
1802
+ },
1803
+ "128225": {
1804
+ "content": "<|reserved_special_token_217|>",
1805
+ "lstrip": false,
1806
+ "normalized": false,
1807
+ "rstrip": false,
1808
+ "single_word": false,
1809
+ "special": true
1810
+ },
1811
+ "128226": {
1812
+ "content": "<|reserved_special_token_218|>",
1813
+ "lstrip": false,
1814
+ "normalized": false,
1815
+ "rstrip": false,
1816
+ "single_word": false,
1817
+ "special": true
1818
+ },
1819
+ "128227": {
1820
+ "content": "<|reserved_special_token_219|>",
1821
+ "lstrip": false,
1822
+ "normalized": false,
1823
+ "rstrip": false,
1824
+ "single_word": false,
1825
+ "special": true
1826
+ },
1827
+ "128228": {
1828
+ "content": "<|reserved_special_token_220|>",
1829
+ "lstrip": false,
1830
+ "normalized": false,
1831
+ "rstrip": false,
1832
+ "single_word": false,
1833
+ "special": true
1834
+ },
1835
+ "128229": {
1836
+ "content": "<|reserved_special_token_221|>",
1837
+ "lstrip": false,
1838
+ "normalized": false,
1839
+ "rstrip": false,
1840
+ "single_word": false,
1841
+ "special": true
1842
+ },
1843
+ "128230": {
1844
+ "content": "<|reserved_special_token_222|>",
1845
+ "lstrip": false,
1846
+ "normalized": false,
1847
+ "rstrip": false,
1848
+ "single_word": false,
1849
+ "special": true
1850
+ },
1851
+ "128231": {
1852
+ "content": "<|reserved_special_token_223|>",
1853
+ "lstrip": false,
1854
+ "normalized": false,
1855
+ "rstrip": false,
1856
+ "single_word": false,
1857
+ "special": true
1858
+ },
1859
+ "128232": {
1860
+ "content": "<|reserved_special_token_224|>",
1861
+ "lstrip": false,
1862
+ "normalized": false,
1863
+ "rstrip": false,
1864
+ "single_word": false,
1865
+ "special": true
1866
+ },
1867
+ "128233": {
1868
+ "content": "<|reserved_special_token_225|>",
1869
+ "lstrip": false,
1870
+ "normalized": false,
1871
+ "rstrip": false,
1872
+ "single_word": false,
1873
+ "special": true
1874
+ },
1875
+ "128234": {
1876
+ "content": "<|reserved_special_token_226|>",
1877
+ "lstrip": false,
1878
+ "normalized": false,
1879
+ "rstrip": false,
1880
+ "single_word": false,
1881
+ "special": true
1882
+ },
1883
+ "128235": {
1884
+ "content": "<|reserved_special_token_227|>",
1885
+ "lstrip": false,
1886
+ "normalized": false,
1887
+ "rstrip": false,
1888
+ "single_word": false,
1889
+ "special": true
1890
+ },
1891
+ "128236": {
1892
+ "content": "<|reserved_special_token_228|>",
1893
+ "lstrip": false,
1894
+ "normalized": false,
1895
+ "rstrip": false,
1896
+ "single_word": false,
1897
+ "special": true
1898
+ },
1899
+ "128237": {
1900
+ "content": "<|reserved_special_token_229|>",
1901
+ "lstrip": false,
1902
+ "normalized": false,
1903
+ "rstrip": false,
1904
+ "single_word": false,
1905
+ "special": true
1906
+ },
1907
+ "128238": {
1908
+ "content": "<|reserved_special_token_230|>",
1909
+ "lstrip": false,
1910
+ "normalized": false,
1911
+ "rstrip": false,
1912
+ "single_word": false,
1913
+ "special": true
1914
+ },
1915
+ "128239": {
1916
+ "content": "<|reserved_special_token_231|>",
1917
+ "lstrip": false,
1918
+ "normalized": false,
1919
+ "rstrip": false,
1920
+ "single_word": false,
1921
+ "special": true
1922
+ },
1923
+ "128240": {
1924
+ "content": "<|reserved_special_token_232|>",
1925
+ "lstrip": false,
1926
+ "normalized": false,
1927
+ "rstrip": false,
1928
+ "single_word": false,
1929
+ "special": true
1930
+ },
1931
+ "128241": {
1932
+ "content": "<|reserved_special_token_233|>",
1933
+ "lstrip": false,
1934
+ "normalized": false,
1935
+ "rstrip": false,
1936
+ "single_word": false,
1937
+ "special": true
1938
+ },
1939
+ "128242": {
1940
+ "content": "<|reserved_special_token_234|>",
1941
+ "lstrip": false,
1942
+ "normalized": false,
1943
+ "rstrip": false,
1944
+ "single_word": false,
1945
+ "special": true
1946
+ },
1947
+ "128243": {
1948
+ "content": "<|reserved_special_token_235|>",
1949
+ "lstrip": false,
1950
+ "normalized": false,
1951
+ "rstrip": false,
1952
+ "single_word": false,
1953
+ "special": true
1954
+ },
1955
+ "128244": {
1956
+ "content": "<|reserved_special_token_236|>",
1957
+ "lstrip": false,
1958
+ "normalized": false,
1959
+ "rstrip": false,
1960
+ "single_word": false,
1961
+ "special": true
1962
+ },
1963
+ "128245": {
1964
+ "content": "<|reserved_special_token_237|>",
1965
+ "lstrip": false,
1966
+ "normalized": false,
1967
+ "rstrip": false,
1968
+ "single_word": false,
1969
+ "special": true
1970
+ },
1971
+ "128246": {
1972
+ "content": "<|reserved_special_token_238|>",
1973
+ "lstrip": false,
1974
+ "normalized": false,
1975
+ "rstrip": false,
1976
+ "single_word": false,
1977
+ "special": true
1978
+ },
1979
+ "128247": {
1980
+ "content": "<|reserved_special_token_239|>",
1981
+ "lstrip": false,
1982
+ "normalized": false,
1983
+ "rstrip": false,
1984
+ "single_word": false,
1985
+ "special": true
1986
+ },
1987
+ "128248": {
1988
+ "content": "<|reserved_special_token_240|>",
1989
+ "lstrip": false,
1990
+ "normalized": false,
1991
+ "rstrip": false,
1992
+ "single_word": false,
1993
+ "special": true
1994
+ },
1995
+ "128249": {
1996
+ "content": "<|reserved_special_token_241|>",
1997
+ "lstrip": false,
1998
+ "normalized": false,
1999
+ "rstrip": false,
2000
+ "single_word": false,
2001
+ "special": true
2002
+ },
2003
+ "128250": {
2004
+ "content": "<|reserved_special_token_242|>",
2005
+ "lstrip": false,
2006
+ "normalized": false,
2007
+ "rstrip": false,
2008
+ "single_word": false,
2009
+ "special": true
2010
+ },
2011
+ "128251": {
2012
+ "content": "<|reserved_special_token_243|>",
2013
+ "lstrip": false,
2014
+ "normalized": false,
2015
+ "rstrip": false,
2016
+ "single_word": false,
2017
+ "special": true
2018
+ },
2019
+ "128252": {
2020
+ "content": "<|reserved_special_token_244|>",
2021
+ "lstrip": false,
2022
+ "normalized": false,
2023
+ "rstrip": false,
2024
+ "single_word": false,
2025
+ "special": true
2026
+ },
2027
+ "128253": {
2028
+ "content": "<|reserved_special_token_245|>",
2029
+ "lstrip": false,
2030
+ "normalized": false,
2031
+ "rstrip": false,
2032
+ "single_word": false,
2033
+ "special": true
2034
+ },
2035
+ "128254": {
2036
+ "content": "<|reserved_special_token_246|>",
2037
+ "lstrip": false,
2038
+ "normalized": false,
2039
+ "rstrip": false,
2040
+ "single_word": false,
2041
+ "special": true
2042
+ },
2043
+ "128255": {
2044
+ "content": "<|reserved_special_token_247|>",
2045
+ "lstrip": false,
2046
+ "normalized": false,
2047
+ "rstrip": false,
2048
+ "single_word": false,
2049
+ "special": true
2050
+ }
2051
+ },
2052
+ "additional_special_tokens": [
2053
+ "<|eot_id|>",
2054
+ "<|eom_id|>"
2055
+ ],
2056
+ "bos_token": "<|begin_of_text|>",
2057
+ "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- Extract system message #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content'] | trim %}\n {%- set messages = messages[1:] %}\n {{- system_message + \"\\n\" }}\n{%- else %}\n {%- set system_message = \"You are a helpful assistant that can use tools. You are developed by Salesforce xLAM team.\" %}\n {% set format_instruction %}You have access to a set of tools. When using tools, make calls in a single JSON array: \n\n[{\"name\": \"tool_call_name\", \"arguments\": {\"arg1\": \"value1\", \"arg2\": \"value2\"}}, ... (additional parallel tool calls as needed)]\n\nIf no tool is suitable, state that explicitly. If the user's input lacks required parameters, ask for clarification. Do not interpret or respond until tool results are returned. Once they are available, process them or make additional calls if needed. For tasks that don't require tools, such as casual conversation or general advice, respond directly in plain text. 
The available tools are:{% endset %}\n {{- system_message + \"\\n\" }}\n {%- if tools is not none %}\n {{- format_instruction + \"\\n\\n\" }}\n {%- endif %}\n{%- endif %}\n\n\n{%- if tools is not none %}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n{%- endif %}\n{{- \"<|eot_id|>\" }}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n {%- elif 'tool_calls' in message %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {%- if message['tool_calls'] %}\n {{- \"[\" }}\n {%- for tool_call_function in message.tool_calls %}\n {%- set tool_call = tool_call_function.function %}\n {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n {{- '\"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- \"}\" }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \"]\" }}\n {{- \"<|eot_id|>\" }}\n {%- elif message['content'] %}\n {{- message['content'] | trim + '<|eot_id|>' }}\n {%- else %}\n {{- \"[]\\n\" + '<|eot_id|>' }}\n {%- endif %}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|start_header_id|>\" + \"ipython\" + \"<|end_header_id|>\\n\\n\" }}\n {%- set content = message[\"content\"] %}\n {%- if content is mapping or (content is iterable and content is not string) %}\n {{- content | tojson }}\n {%- else %}\n {{- content }}\n {%- endif %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}",
2058
+ "clean_up_tokenization_spaces": true,
2059
+ "eos_token": "<|eot_id|>",
2060
+ "extra_special_tokens": {},
2061
+ "model_input_names": [
2062
+ "input_ids",
2063
+ "attention_mask"
2064
+ ],
2065
+ "model_max_length": 16384,
2066
+ "pad_token": "<|eot_id|>",
2067
+ "padding_side": "right",
2068
+ "split_special_tokens": false,
2069
+ "tokenizer_class": "PreTrainedTokenizerFast"
2070
+ }
checkpoint-1368/trainer_state.json ADDED
@@ -0,0 +1,1049 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "best_metric": 0.042365700006484985,
3
+ "best_model_checkpoint": "./xlam_lora_new_ete_over_size_3epoch_multi_full_eng/checkpoint-1368",
4
+ "epoch": 2.6587306407531126,
5
+ "eval_steps": 171,
6
+ "global_step": 1368,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.019435165502581234,
13
+ "grad_norm": 0.2028260976076126,
14
+ "learning_rate": 3.205128205128205e-06,
15
+ "loss": 0.0883,
16
+ "step": 10
17
+ },
18
+ {
19
+ "epoch": 0.03887033100516247,
20
+ "grad_norm": 0.16199472546577454,
21
+ "learning_rate": 6.41025641025641e-06,
22
+ "loss": 0.0862,
23
+ "step": 20
24
+ },
25
+ {
26
+ "epoch": 0.0583054965077437,
27
+ "grad_norm": 0.10014229267835617,
28
+ "learning_rate": 9.615384615384616e-06,
29
+ "loss": 0.0785,
30
+ "step": 30
31
+ },
32
+ {
33
+ "epoch": 0.07774066201032494,
34
+ "grad_norm": 0.07943801581859589,
35
+ "learning_rate": 1.282051282051282e-05,
36
+ "loss": 0.0772,
37
+ "step": 40
38
+ },
39
+ {
40
+ "epoch": 0.09717582751290617,
41
+ "grad_norm": 0.11369317024946213,
42
+ "learning_rate": 1.602564102564103e-05,
43
+ "loss": 0.0649,
44
+ "step": 50
45
+ },
46
+ {
47
+ "epoch": 0.1166109930154874,
48
+ "grad_norm": 0.10264527052640915,
49
+ "learning_rate": 1.923076923076923e-05,
50
+ "loss": 0.0686,
51
+ "step": 60
52
+ },
53
+ {
54
+ "epoch": 0.13604615851806864,
55
+ "grad_norm": 0.08583803474903107,
56
+ "learning_rate": 2.2435897435897437e-05,
57
+ "loss": 0.0657,
58
+ "step": 70
59
+ },
60
+ {
61
+ "epoch": 0.15548132402064987,
62
+ "grad_norm": 0.09326862543821335,
63
+ "learning_rate": 2.4999884878368972e-05,
64
+ "loss": 0.0605,
65
+ "step": 80
66
+ },
67
+ {
68
+ "epoch": 0.1749164895232311,
69
+ "grad_norm": 0.09622487425804138,
70
+ "learning_rate": 2.4995855843928913e-05,
71
+ "loss": 0.0617,
72
+ "step": 90
73
+ },
74
+ {
75
+ "epoch": 0.19435165502581234,
76
+ "grad_norm": 0.10855123400688171,
77
+ "learning_rate": 2.4986072848240374e-05,
78
+ "loss": 0.0548,
79
+ "step": 100
80
+ },
81
+ {
82
+ "epoch": 0.21378682052839357,
83
+ "grad_norm": 0.12510105967521667,
84
+ "learning_rate": 2.4970540396075083e-05,
85
+ "loss": 0.0545,
86
+ "step": 110
87
+ },
88
+ {
89
+ "epoch": 0.2332219860309748,
90
+ "grad_norm": 0.11628145724534988,
91
+ "learning_rate": 2.494926563965445e-05,
92
+ "loss": 0.0601,
93
+ "step": 120
94
+ },
95
+ {
96
+ "epoch": 0.252657151533556,
97
+ "grad_norm": 0.12050613015890121,
98
+ "learning_rate": 2.4922258375356232e-05,
99
+ "loss": 0.0585,
100
+ "step": 130
101
+ },
102
+ {
103
+ "epoch": 0.2720923170361373,
104
+ "grad_norm": 0.08949285745620728,
105
+ "learning_rate": 2.488953103920354e-05,
106
+ "loss": 0.0477,
107
+ "step": 140
108
+ },
109
+ {
110
+ "epoch": 0.2915274825387185,
111
+ "grad_norm": 0.14706853032112122,
112
+ "learning_rate": 2.4851098701138466e-05,
113
+ "loss": 0.0555,
114
+ "step": 150
115
+ },
116
+ {
117
+ "epoch": 0.31096264804129975,
118
+ "grad_norm": 0.13029147684574127,
119
+ "learning_rate": 2.4806979058082832e-05,
120
+ "loss": 0.0493,
121
+ "step": 160
122
+ },
123
+ {
124
+ "epoch": 0.330397813543881,
125
+ "grad_norm": 0.1321103274822235,
126
+ "learning_rate": 2.475719242578929e-05,
127
+ "loss": 0.0547,
128
+ "step": 170
129
+ },
130
+ {
131
+ "epoch": 0.3323413300941391,
132
+ "eval_loss": 0.054034121334552765,
133
+ "eval_runtime": 172.2024,
134
+ "eval_samples_per_second": 5.029,
135
+ "eval_steps_per_second": 5.029,
136
+ "step": 171
137
+ },
138
+ {
139
+ "epoch": 0.3498329790464622,
140
+ "grad_norm": 0.14103983342647552,
141
+ "learning_rate": 2.470176172948652e-05,
142
+ "loss": 0.0522,
143
+ "step": 180
144
+ },
145
+ {
146
+ "epoch": 0.36926814454904344,
147
+ "grad_norm": 0.14120739698410034,
148
+ "learning_rate": 2.4640712493322894e-05,
149
+ "loss": 0.0501,
150
+ "step": 190
151
+ },
152
+ {
153
+ "epoch": 0.3887033100516247,
154
+ "grad_norm": 0.1301490068435669,
155
+ "learning_rate": 2.4574072828613354e-05,
156
+ "loss": 0.0492,
157
+ "step": 200
158
+ },
159
+ {
160
+ "epoch": 0.4081384755542059,
161
+ "grad_norm": 0.1606156975030899,
162
+ "learning_rate": 2.450187342089502e-05,
163
+ "loss": 0.0488,
164
+ "step": 210
165
+ },
166
+ {
167
+ "epoch": 0.42757364105678713,
168
+ "grad_norm": 0.1623048037290573,
169
+ "learning_rate": 2.442414751579744e-05,
170
+ "loss": 0.0459,
171
+ "step": 220
172
+ },
173
+ {
174
+ "epoch": 0.44700880655936837,
175
+ "grad_norm": 0.1424696147441864,
176
+ "learning_rate": 2.434093090373396e-05,
177
+ "loss": 0.0441,
178
+ "step": 230
179
+ },
180
+ {
181
+ "epoch": 0.4664439720619496,
182
+ "grad_norm": 0.13980570435523987,
183
+ "learning_rate": 2.4252261903421375e-05,
184
+ "loss": 0.0483,
185
+ "step": 240
186
+ },
187
+ {
188
+ "epoch": 0.4858791375645308,
189
+ "grad_norm": 0.17693087458610535,
190
+ "learning_rate": 2.415818134423528e-05,
191
+ "loss": 0.0451,
192
+ "step": 250
193
+ },
194
+ {
195
+ "epoch": 0.505314303067112,
196
+ "grad_norm": 0.18210634589195251,
197
+ "learning_rate": 2.405873254740942e-05,
198
+ "loss": 0.0466,
199
+ "step": 260
200
+ },
201
+ {
202
+ "epoch": 0.5247494685696933,
203
+ "grad_norm": 0.1527709811925888,
204
+ "learning_rate": 2.395396130608756e-05,
205
+ "loss": 0.0488,
206
+ "step": 270
207
+ },
208
+ {
209
+ "epoch": 0.5441846340722746,
210
+ "grad_norm": 0.15795762836933136,
211
+ "learning_rate": 2.3843915864237143e-05,
212
+ "loss": 0.0448,
213
+ "step": 280
214
+ },
215
+ {
216
+ "epoch": 0.5636197995748558,
217
+ "grad_norm": 0.15495383739471436,
218
+ "learning_rate": 2.3728646894434413e-05,
219
+ "loss": 0.047,
220
+ "step": 290
221
+ },
222
+ {
223
+ "epoch": 0.583054965077437,
224
+ "grad_norm": 0.17630279064178467,
225
+ "learning_rate": 2.3608207474531236e-05,
226
+ "loss": 0.0429,
227
+ "step": 300
228
+ },
229
+ {
230
+ "epoch": 0.6024901305800182,
231
+ "grad_norm": 0.17523537576198578,
232
+ "learning_rate": 2.3482653063214356e-05,
233
+ "loss": 0.0473,
234
+ "step": 310
235
+ },
236
+ {
237
+ "epoch": 0.6219252960825995,
238
+ "grad_norm": 0.1995943933725357,
239
+ "learning_rate": 2.3352041474468373e-05,
240
+ "loss": 0.0392,
241
+ "step": 320
242
+ },
243
+ {
244
+ "epoch": 0.6413604615851807,
245
+ "grad_norm": 0.1683543622493744,
246
+ "learning_rate": 2.3216432850954155e-05,
247
+ "loss": 0.0438,
248
+ "step": 330
249
+ },
250
+ {
251
+ "epoch": 0.660795627087762,
252
+ "grad_norm": 0.1374560445547104,
253
+ "learning_rate": 2.307588963631497e-05,
254
+ "loss": 0.0388,
255
+ "step": 340
256
+ },
257
+ {
258
+ "epoch": 0.6646826601882782,
259
+ "eval_loss": 0.048522137105464935,
260
+ "eval_runtime": 174.8794,
261
+ "eval_samples_per_second": 4.952,
262
+ "eval_steps_per_second": 4.952,
263
+ "step": 342
264
+ },
265
+ {
266
+ "epoch": 0.6802307925903431,
267
+ "grad_norm": 0.166055366396904,
268
+ "learning_rate": 2.293047654642309e-05,
269
+ "loss": 0.0469,
270
+ "step": 350
271
+ },
272
+ {
273
+ "epoch": 0.6996659580929244,
274
+ "grad_norm": 0.14119000732898712,
275
+ "learning_rate": 2.2780260539580116e-05,
276
+ "loss": 0.0424,
277
+ "step": 360
278
+ },
279
+ {
280
+ "epoch": 0.7191011235955056,
281
+ "grad_norm": 0.23057888448238373,
282
+ "learning_rate": 2.2625310785684688e-05,
283
+ "loss": 0.0451,
284
+ "step": 370
285
+ },
286
+ {
287
+ "epoch": 0.7385362890980869,
288
+ "grad_norm": 0.1304253786802292,
289
+ "learning_rate": 2.2465698634381892e-05,
290
+ "loss": 0.0434,
291
+ "step": 380
292
+ },
293
+ {
294
+ "epoch": 0.7579714546006681,
295
+ "grad_norm": 0.17952455580234528,
296
+ "learning_rate": 2.2301497582208884e-05,
297
+ "loss": 0.0409,
298
+ "step": 390
299
+ },
300
+ {
301
+ "epoch": 0.7774066201032493,
302
+ "grad_norm": 0.15560920536518097,
303
+ "learning_rate": 2.2132783238751987e-05,
304
+ "loss": 0.0465,
305
+ "step": 400
306
+ },
307
+ {
308
+ "epoch": 0.7968417856058305,
309
+ "grad_norm": 0.12066332250833511,
310
+ "learning_rate": 2.1959633291830785e-05,
311
+ "loss": 0.0433,
312
+ "step": 410
313
+ },
314
+ {
315
+ "epoch": 0.8162769511084118,
316
+ "grad_norm": 0.15932129323482513,
317
+ "learning_rate": 2.1782127471725232e-05,
318
+ "loss": 0.0458,
319
+ "step": 420
320
+ },
321
+ {
322
+ "epoch": 0.835712116610993,
323
+ "grad_norm": 0.19025273621082306,
324
+ "learning_rate": 2.160034751446231e-05,
325
+ "loss": 0.0522,
326
+ "step": 430
327
+ },
328
+ {
329
+ "epoch": 0.8551472821135743,
330
+ "grad_norm": 0.15156866610050201,
331
+ "learning_rate": 2.1414377124179038e-05,
332
+ "loss": 0.0433,
333
+ "step": 440
334
+ },
335
+ {
336
+ "epoch": 0.8745824476161554,
337
+ "grad_norm": 0.1594618409872055,
338
+ "learning_rate": 2.1224301934579293e-05,
339
+ "loss": 0.0434,
340
+ "step": 450
341
+ },
342
+ {
343
+ "epoch": 0.8940176131187367,
344
+ "grad_norm": 0.16077858209609985,
345
+ "learning_rate": 2.1030209469502098e-05,
346
+ "loss": 0.0426,
347
+ "step": 460
348
+ },
349
+ {
350
+ "epoch": 0.9134527786213179,
351
+ "grad_norm": 0.15807421505451202,
352
+ "learning_rate": 2.0832189102619543e-05,
353
+ "loss": 0.0461,
354
+ "step": 470
355
+ },
356
+ {
357
+ "epoch": 0.9328879441238992,
358
+ "grad_norm": 0.20585452020168304,
359
+ "learning_rate": 2.0630332016282927e-05,
360
+ "loss": 0.0429,
361
+ "step": 480
362
+ },
363
+ {
364
+ "epoch": 0.9523231096264804,
365
+ "grad_norm": 0.1859339326620102,
366
+ "learning_rate": 2.0424731159536083e-05,
367
+ "loss": 0.0432,
368
+ "step": 490
369
+ },
370
+ {
371
+ "epoch": 0.9717582751290617,
372
+ "grad_norm": 0.20674780011177063,
373
+ "learning_rate": 2.021548120531516e-05,
374
+ "loss": 0.0392,
375
+ "step": 500
376
+ },
377
+ {
378
+ "epoch": 0.9911934406316428,
379
+ "grad_norm": 0.22314420342445374,
380
+ "learning_rate": 2.0002678506854606e-05,
381
+ "loss": 0.041,
382
+ "step": 510
383
+ },
384
+ {
385
+ "epoch": 0.9970239902824173,
386
+ "eval_loss": 0.04610577970743179,
387
+ "eval_runtime": 175.1913,
388
+ "eval_samples_per_second": 4.943,
389
+ "eval_steps_per_second": 4.943,
390
+ "step": 513
391
+ },
392
+ {
393
+ "epoch": 1.010628606134224,
394
+ "grad_norm": 0.1707136183977127,
395
+ "learning_rate": 1.9786421053319455e-05,
396
+ "loss": 0.0468,
397
+ "step": 520
398
+ },
399
+ {
400
+ "epoch": 1.0300637716368053,
401
+ "grad_norm": 0.18121393024921417,
402
+ "learning_rate": 1.9566808424684284e-05,
403
+ "loss": 0.0361,
404
+ "step": 530
405
+ },
406
+ {
407
+ "epoch": 1.0494989371393866,
408
+ "grad_norm": 0.17815589904785156,
409
+ "learning_rate": 1.9343941745879695e-05,
410
+ "loss": 0.0418,
411
+ "step": 540
412
+ },
413
+ {
414
+ "epoch": 1.0689341026419679,
415
+ "grad_norm": 0.17634443938732147,
416
+ "learning_rate": 1.9117923640227314e-05,
417
+ "loss": 0.0437,
418
+ "step": 550
419
+ },
420
+ {
421
+ "epoch": 1.088369268144549,
422
+ "grad_norm": 0.18102432787418365,
423
+ "learning_rate": 1.8888858182184925e-05,
424
+ "loss": 0.0426,
425
+ "step": 560
426
+ },
427
+ {
428
+ "epoch": 1.1078044336471302,
429
+ "grad_norm": 0.20596696436405182,
430
+ "learning_rate": 1.865685084942331e-05,
431
+ "loss": 0.0414,
432
+ "step": 570
433
+ },
434
+ {
435
+ "epoch": 1.1272395991497115,
436
+ "grad_norm": 0.2145373523235321,
437
+ "learning_rate": 1.8422008474257013e-05,
438
+ "loss": 0.0354,
439
+ "step": 580
440
+ },
441
+ {
442
+ "epoch": 1.1466747646522928,
443
+ "grad_norm": 0.2385258674621582,
444
+ "learning_rate": 1.8184439194451326e-05,
445
+ "loss": 0.0357,
446
+ "step": 590
447
+ },
448
+ {
449
+ "epoch": 1.166109930154874,
450
+ "grad_norm": 0.23679479956626892,
451
+ "learning_rate": 1.7944252403428097e-05,
452
+ "loss": 0.041,
453
+ "step": 600
454
+ },
455
+ {
456
+ "epoch": 1.1855450956574551,
457
+ "grad_norm": 0.1947854906320572,
458
+ "learning_rate": 1.770155869989343e-05,
459
+ "loss": 0.0407,
460
+ "step": 610
461
+ },
462
+ {
463
+ "epoch": 1.2049802611600364,
464
+ "grad_norm": 0.25392264127731323,
465
+ "learning_rate": 1.7456469836910334e-05,
466
+ "loss": 0.0405,
467
+ "step": 620
468
+ },
469
+ {
470
+ "epoch": 1.2244154266626177,
471
+ "grad_norm": 0.2636931240558624,
472
+ "learning_rate": 1.7209098670439817e-05,
473
+ "loss": 0.0442,
474
+ "step": 630
475
+ },
476
+ {
477
+ "epoch": 1.243850592165199,
478
+ "grad_norm": 0.19380666315555573,
479
+ "learning_rate": 1.695955910737419e-05,
480
+ "loss": 0.0344,
481
+ "step": 640
482
+ },
483
+ {
484
+ "epoch": 1.26328575766778,
485
+ "grad_norm": 0.2423335164785385,
486
+ "learning_rate": 1.670796605308638e-05,
487
+ "loss": 0.0415,
488
+ "step": 650
489
+ },
490
+ {
491
+ "epoch": 1.2827209231703613,
492
+ "grad_norm": 0.20322546362876892,
493
+ "learning_rate": 1.6454435358519524e-05,
494
+ "loss": 0.0381,
495
+ "step": 660
496
+ },
497
+ {
498
+ "epoch": 1.3021560886729426,
499
+ "grad_norm": 0.20570382475852966,
500
+ "learning_rate": 1.6199083766841115e-05,
501
+ "loss": 0.035,
502
+ "step": 670
503
+ },
504
+ {
505
+ "epoch": 1.321591254175524,
506
+ "grad_norm": 0.23237545788288116,
507
+ "learning_rate": 1.594202885968636e-05,
508
+ "loss": 0.0447,
509
+ "step": 680
510
+ },
511
+ {
512
+ "epoch": 1.3293653203765563,
513
+ "eval_loss": 0.0446239709854126,
514
+ "eval_runtime": 174.4977,
515
+ "eval_samples_per_second": 4.963,
516
+ "eval_steps_per_second": 4.963,
517
+ "step": 684
518
+ },
519
+ {
520
+ "epoch": 1.341026419678105,
521
+ "grad_norm": 0.2273474782705307,
522
+ "learning_rate": 1.568338900301536e-05,
523
+ "loss": 0.0406,
524
+ "step": 690
525
+ },
526
+ {
527
+ "epoch": 1.3604615851806863,
528
+ "grad_norm": 0.2105632871389389,
529
+ "learning_rate": 1.5423283292609255e-05,
530
+ "loss": 0.0384,
531
+ "step": 700
532
+ },
533
+ {
534
+ "epoch": 1.3798967506832676,
535
+ "grad_norm": 0.728905200958252,
536
+ "learning_rate": 1.5161831499230197e-05,
537
+ "loss": 0.0385,
538
+ "step": 710
539
+ },
540
+ {
541
+ "epoch": 1.3993319161858488,
542
+ "grad_norm": 0.2697829604148865,
543
+ "learning_rate": 1.4899154013470574e-05,
544
+ "loss": 0.0407,
545
+ "step": 720
546
+ },
547
+ {
548
+ "epoch": 1.4187670816884301,
549
+ "grad_norm": 0.23418137431144714,
550
+ "learning_rate": 1.4635371790316805e-05,
551
+ "loss": 0.0425,
552
+ "step": 730
553
+ },
554
+ {
555
+ "epoch": 1.4382022471910112,
556
+ "grad_norm": 0.21163399517536163,
557
+ "learning_rate": 1.437060629345325e-05,
558
+ "loss": 0.0369,
559
+ "step": 740
560
+ },
561
+ {
562
+ "epoch": 1.4576374126935925,
563
+ "grad_norm": 0.24824394285678864,
564
+ "learning_rate": 1.4104979439331889e-05,
565
+ "loss": 0.0413,
566
+ "step": 750
567
+ },
568
+ {
569
+ "epoch": 1.4770725781961738,
570
+ "grad_norm": 0.24107202887535095,
571
+ "learning_rate": 1.383861354103349e-05,
572
+ "loss": 0.0368,
573
+ "step": 760
574
+ },
575
+ {
576
+ "epoch": 1.4965077436987548,
577
+ "grad_norm": 0.25261572003364563,
578
+ "learning_rate": 1.357163125194618e-05,
579
+ "loss": 0.0437,
580
+ "step": 770
581
+ },
582
+ {
583
+ "epoch": 1.5159429092013361,
584
+ "grad_norm": 0.1909024715423584,
585
+ "learning_rate": 1.3304155509287273e-05,
586
+ "loss": 0.0376,
587
+ "step": 780
588
+ },
589
+ {
590
+ "epoch": 1.5353780747039174,
591
+ "grad_norm": 0.24820125102996826,
592
+ "learning_rate": 1.3036309477494433e-05,
593
+ "loss": 0.038,
594
+ "step": 790
595
+ },
596
+ {
597
+ "epoch": 1.5548132402064985,
598
+ "grad_norm": 0.2151060849428177,
599
+ "learning_rate": 1.27682164915122e-05,
600
+ "loss": 0.0427,
601
+ "step": 800
602
+ },
603
+ {
604
+ "epoch": 1.57424840570908,
605
+ "grad_norm": 0.2735498249530792,
606
+ "learning_rate": 1.25e-05,
607
+ "loss": 0.0405,
608
+ "step": 810
609
+ },
610
+ {
611
+ "epoch": 1.593683571211661,
612
+ "grad_norm": 0.20167022943496704,
613
+ "learning_rate": 1.2231783508487806e-05,
614
+ "loss": 0.0361,
615
+ "step": 820
616
+ },
617
+ {
618
+ "epoch": 1.6131187367142423,
619
+ "grad_norm": 0.29892289638519287,
620
+ "learning_rate": 1.1963690522505568e-05,
621
+ "loss": 0.0343,
622
+ "step": 830
623
+ },
624
+ {
625
+ "epoch": 1.6325539022168236,
626
+ "grad_norm": 0.2181161344051361,
627
+ "learning_rate": 1.169584449071273e-05,
628
+ "loss": 0.044,
629
+ "step": 840
630
+ },
631
+ {
632
+ "epoch": 1.6519890677194047,
633
+ "grad_norm": 0.24029949307441711,
634
+ "learning_rate": 1.142836874805382e-05,
635
+ "loss": 0.0347,
636
+ "step": 850
637
+ },
638
+ {
639
+ "epoch": 1.6617066504706954,
640
+ "eval_loss": 0.04329855740070343,
641
+ "eval_runtime": 170.3718,
642
+ "eval_samples_per_second": 5.083,
643
+ "eval_steps_per_second": 5.083,
644
+ "step": 855
645
+ },
646
+ {
647
+ "epoch": 1.6714242332219862,
648
+ "grad_norm": 0.23985505104064941,
649
+ "learning_rate": 1.1161386458966511e-05,
650
+ "loss": 0.0401,
651
+ "step": 860
652
+ },
653
+ {
654
+ "epoch": 1.6908593987245673,
655
+ "grad_norm": 0.22673290967941284,
656
+ "learning_rate": 1.0895020560668112e-05,
657
+ "loss": 0.0364,
658
+ "step": 870
659
+ },
660
+ {
661
+ "epoch": 1.7102945642271485,
662
+ "grad_norm": 0.22750601172447205,
663
+ "learning_rate": 1.0629393706546752e-05,
664
+ "loss": 0.039,
665
+ "step": 880
666
+ },
667
+ {
668
+ "epoch": 1.7297297297297298,
669
+ "grad_norm": 0.24846433103084564,
670
+ "learning_rate": 1.03646282096832e-05,
671
+ "loss": 0.0341,
672
+ "step": 890
673
+ },
674
+ {
675
+ "epoch": 1.749164895232311,
676
+ "grad_norm": 0.19867610931396484,
677
+ "learning_rate": 1.0100845986529429e-05,
678
+ "loss": 0.036,
679
+ "step": 900
680
+ },
681
+ {
682
+ "epoch": 1.7686000607348922,
683
+ "grad_norm": 0.2108948975801468,
684
+ "learning_rate": 9.838168500769806e-06,
685
+ "loss": 0.0359,
686
+ "step": 910
687
+ },
688
+ {
689
+ "epoch": 1.7880352262374735,
690
+ "grad_norm": 0.22352129220962524,
691
+ "learning_rate": 9.576716707390745e-06,
692
+ "loss": 0.0432,
693
+ "step": 920
694
+ },
695
+ {
696
+ "epoch": 1.8074703917400545,
697
+ "grad_norm": 0.2634281516075134,
698
+ "learning_rate": 9.316610996984642e-06,
699
+ "loss": 0.0371,
700
+ "step": 930
701
+ },
702
+ {
703
+ "epoch": 1.826905557242636,
704
+ "grad_norm": 0.261662095785141,
705
+ "learning_rate": 9.057971140313645e-06,
706
+ "loss": 0.0414,
707
+ "step": 940
708
+ },
709
+ {
710
+ "epoch": 1.846340722745217,
711
+ "grad_norm": 0.269505113363266,
712
+ "learning_rate": 8.800916233158885e-06,
713
+ "loss": 0.033,
714
+ "step": 950
715
+ },
716
+ {
717
+ "epoch": 1.8657758882477984,
718
+ "grad_norm": 0.2583780288696289,
719
+ "learning_rate": 8.545564641480484e-06,
720
+ "loss": 0.04,
721
+ "step": 960
722
+ },
723
+ {
724
+ "epoch": 1.8852110537503797,
725
+ "grad_norm": 0.2607627213001251,
726
+ "learning_rate": 8.292033946913624e-06,
727
+ "loss": 0.0354,
728
+ "step": 970
729
+ },
730
+ {
731
+ "epoch": 1.9046462192529607,
732
+ "grad_norm": 0.23419621586799622,
733
+ "learning_rate": 8.040440892625817e-06,
734
+ "loss": 0.0362,
735
+ "step": 980
736
+ },
737
+ {
738
+ "epoch": 1.924081384755542,
739
+ "grad_norm": 0.2785259783267975,
740
+ "learning_rate": 7.790901329560186e-06,
741
+ "loss": 0.0367,
742
+ "step": 990
743
+ },
744
+ {
745
+ "epoch": 1.9435165502581233,
746
+ "grad_norm": 0.2678421437740326,
747
+ "learning_rate": 7.543530163089671e-06,
748
+ "loss": 0.0363,
749
+ "step": 1000
750
+ },
751
+ {
752
+ "epoch": 1.9629517157607044,
753
+ "grad_norm": 0.2274320125579834,
754
+ "learning_rate": 7.29844130010657e-06,
755
+ "loss": 0.0395,
756
+ "step": 1010
757
+ },
758
+ {
759
+ "epoch": 1.9823868812632859,
760
+ "grad_norm": 0.2792135179042816,
761
+ "learning_rate": 7.055747596571904e-06,
762
+ "loss": 0.0374,
763
+ "step": 1020
764
+ },
765
+ {
766
+ "epoch": 1.9940479805648343,
767
+ "eval_loss": 0.04268278926610947,
768
+ "eval_runtime": 169.855,
769
+ "eval_samples_per_second": 5.098,
770
+ "eval_steps_per_second": 5.098,
771
+ "step": 1026
772
+ },
773
+ {
774
+ "epoch": 2.001822046765867,
775
+ "grad_norm": 0.2434389591217041,
776
+ "learning_rate": 6.815560805548682e-06,
777
+ "loss": 0.0359,
778
+ "step": 1030
779
+ },
780
+ {
781
+ "epoch": 2.021257212268448,
782
+ "grad_norm": 0.2474217265844345,
783
+ "learning_rate": 6.577991525742991e-06,
784
+ "loss": 0.0301,
785
+ "step": 1040
786
+ },
787
+ {
788
+ "epoch": 2.0406923777710295,
789
+ "grad_norm": 0.21972987055778503,
790
+ "learning_rate": 6.3431491505766965e-06,
791
+ "loss": 0.031,
792
+ "step": 1050
793
+ },
794
+ {
795
+ "epoch": 2.0601275432736106,
796
+ "grad_norm": 0.26206907629966736,
797
+ "learning_rate": 6.111141817815079e-06,
798
+ "loss": 0.0367,
799
+ "step": 1060
800
+ },
801
+ {
802
+ "epoch": 2.079562708776192,
803
+ "grad_norm": 0.26828330755233765,
804
+ "learning_rate": 5.882076359772686e-06,
805
+ "loss": 0.0358,
806
+ "step": 1070
807
+ },
808
+ {
809
+ "epoch": 2.098997874278773,
810
+ "grad_norm": 0.3341256380081177,
811
+ "learning_rate": 5.656058254120305e-06,
812
+ "loss": 0.0362,
813
+ "step": 1080
814
+ },
815
+ {
816
+ "epoch": 2.118433039781354,
817
+ "grad_norm": 0.24040237069129944,
818
+ "learning_rate": 5.4331915753157175e-06,
819
+ "loss": 0.0336,
820
+ "step": 1090
821
+ },
822
+ {
823
+ "epoch": 2.1378682052839357,
824
+ "grad_norm": 0.2951533794403076,
825
+ "learning_rate": 5.213578946680547e-06,
826
+ "loss": 0.0374,
827
+ "step": 1100
828
+ },
829
+ {
830
+ "epoch": 2.157303370786517,
831
+ "grad_norm": 0.3339778184890747,
832
+ "learning_rate": 4.997321493145399e-06,
833
+ "loss": 0.0336,
834
+ "step": 1110
835
+ },
836
+ {
837
+ "epoch": 2.176738536289098,
838
+ "grad_norm": 0.28566041588783264,
839
+ "learning_rate": 4.784518794684843e-06,
840
+ "loss": 0.0305,
841
+ "step": 1120
842
+ },
843
+ {
844
+ "epoch": 2.1961737017916794,
845
+ "grad_norm": 0.23707829415798187,
846
+ "learning_rate": 4.575268840463919e-06,
847
+ "loss": 0.0347,
848
+ "step": 1130
849
+ },
850
+ {
851
+ "epoch": 2.2156088672942604,
852
+ "grad_norm": 0.268509179353714,
853
+ "learning_rate": 4.3696679837170755e-06,
854
+ "loss": 0.0338,
855
+ "step": 1140
856
+ },
857
+ {
858
+ "epoch": 2.235044032796842,
859
+ "grad_norm": 0.2489359974861145,
860
+ "learning_rate": 4.16781089738046e-06,
861
+ "loss": 0.0346,
862
+ "step": 1150
863
+ },
864
+ {
865
+ "epoch": 2.254479198299423,
866
+ "grad_norm": 0.2973306179046631,
867
+ "learning_rate": 3.969790530497904e-06,
868
+ "loss": 0.0341,
869
+ "step": 1160
870
+ },
871
+ {
872
+ "epoch": 2.273914363802004,
873
+ "grad_norm": 0.2385181337594986,
874
+ "learning_rate": 3.7756980654207032e-06,
875
+ "loss": 0.0307,
876
+ "step": 1170
877
+ },
878
+ {
879
+ "epoch": 2.2933495293045856,
880
+ "grad_norm": 0.24831977486610413,
881
+ "learning_rate": 3.585622875820965e-06,
882
+ "loss": 0.0324,
883
+ "step": 1180
884
+ },
885
+ {
886
+ "epoch": 2.3127846948071666,
887
+ "grad_norm": 0.2952122092247009,
888
+ "learning_rate": 3.399652485537691e-06,
889
+ "loss": 0.0337,
890
+ "step": 1190
891
+ },
892
+ {
893
+ "epoch": 2.3263893106589735,
894
+ "eval_loss": 0.042657386511564255,
895
+ "eval_runtime": 168.8317,
896
+ "eval_samples_per_second": 5.129,
897
+ "eval_steps_per_second": 5.129,
898
+ "step": 1197
899
+ },
900
+ {
901
+ "epoch": 2.332219860309748,
902
+ "grad_norm": 0.29106399416923523,
903
+ "learning_rate": 3.2178725282747665e-06,
904
+ "loss": 0.0346,
905
+ "step": 1200
906
+ },
907
+ {
908
+ "epoch": 2.351655025812329,
909
+ "grad_norm": 0.30523982644081116,
910
+ "learning_rate": 3.040366708169222e-06,
911
+ "loss": 0.036,
912
+ "step": 1210
913
+ },
914
+ {
915
+ "epoch": 2.3710901913149103,
916
+ "grad_norm": 0.2523854672908783,
917
+ "learning_rate": 2.8672167612480163e-06,
918
+ "loss": 0.0298,
919
+ "step": 1220
920
+ },
921
+ {
922
+ "epoch": 2.390525356817492,
923
+ "grad_norm": 0.2531813383102417,
924
+ "learning_rate": 2.698502417791121e-06,
925
+ "loss": 0.0383,
926
+ "step": 1230
927
+ },
928
+ {
929
+ "epoch": 2.409960522320073,
930
+ "grad_norm": 0.2635211944580078,
931
+ "learning_rate": 2.534301365618107e-06,
932
+ "loss": 0.0357,
933
+ "step": 1240
934
+ },
935
+ {
936
+ "epoch": 2.4293956878226544,
937
+ "grad_norm": 0.26689672470092773,
938
+ "learning_rate": 2.374689214315311e-06,
939
+ "loss": 0.0333,
940
+ "step": 1250
941
+ },
942
+ {
943
+ "epoch": 2.4488308533252354,
944
+ "grad_norm": 0.29953232407569885,
945
+ "learning_rate": 2.2197394604198853e-06,
946
+ "loss": 0.0317,
947
+ "step": 1260
948
+ },
949
+ {
950
+ "epoch": 2.4682660188278165,
951
+ "grad_norm": 0.21628761291503906,
952
+ "learning_rate": 2.069523453576909e-06,
953
+ "loss": 0.0362,
954
+ "step": 1270
955
+ },
956
+ {
957
+ "epoch": 2.487701184330398,
958
+ "grad_norm": 0.2457425892353058,
959
+ "learning_rate": 1.924110363685033e-06,
960
+ "loss": 0.0353,
961
+ "step": 1280
962
+ },
963
+ {
964
+ "epoch": 2.507136349832979,
965
+ "grad_norm": 0.3192308247089386,
966
+ "learning_rate": 1.783567149045845e-06,
967
+ "loss": 0.036,
968
+ "step": 1290
969
+ },
970
+ {
971
+ "epoch": 2.52657151533556,
972
+ "grad_norm": 0.2815244793891907,
973
+ "learning_rate": 1.6479585255316287e-06,
974
+ "loss": 0.0329,
975
+ "step": 1300
976
+ },
977
+ {
978
+ "epoch": 2.5460066808381416,
979
+ "grad_norm": 0.2549531161785126,
980
+ "learning_rate": 1.5173469367856477e-06,
981
+ "loss": 0.0382,
982
+ "step": 1310
983
+ },
984
+ {
985
+ "epoch": 2.5654418463407227,
986
+ "grad_norm": 0.25880110263824463,
987
+ "learning_rate": 1.3917925254687686e-06,
988
+ "loss": 0.0344,
989
+ "step": 1320
990
+ },
991
+ {
992
+ "epoch": 2.5848770118433038,
993
+ "grad_norm": 0.22720050811767578,
994
+ "learning_rate": 1.271353105565591e-06,
995
+ "loss": 0.0353,
996
+ "step": 1330
997
+ },
998
+ {
999
+ "epoch": 2.6043121773458853,
1000
+ "grad_norm": 0.2991912364959717,
1001
+ "learning_rate": 1.1560841357628582e-06,
1002
+ "loss": 0.0369,
1003
+ "step": 1340
1004
+ },
1005
+ {
1006
+ "epoch": 2.6237473428484663,
1007
+ "grad_norm": 0.3291521668434143,
1008
+ "learning_rate": 1.046038693912442e-06,
1009
+ "loss": 0.0331,
1010
+ "step": 1350
1011
+ },
1012
+ {
1013
+ "epoch": 2.643182508351048,
1014
+ "grad_norm": 0.24359193444252014,
1015
+ "learning_rate": 9.412674525905815e-07,
1016
+ "loss": 0.0335,
1017
+ "step": 1360
1018
+ },
1019
+ {
1020
+ "epoch": 2.6587306407531126,
1021
+ "eval_loss": 0.042365700006484985,
1022
+ "eval_runtime": 168.4092,
1023
+ "eval_samples_per_second": 5.142,
1024
+ "eval_steps_per_second": 5.142,
1025
+ "step": 1368
1026
+ }
1027
+ ],
1028
+ "logging_steps": 10,
1029
+ "max_steps": 1542,
1030
+ "num_input_tokens_seen": 0,
1031
+ "num_train_epochs": 3,
1032
+ "save_steps": 171,
1033
+ "stateful_callbacks": {
1034
+ "TrainerControl": {
1035
+ "args": {
1036
+ "should_epoch_stop": false,
1037
+ "should_evaluate": false,
1038
+ "should_log": false,
1039
+ "should_save": true,
1040
+ "should_training_stop": false
1041
+ },
1042
+ "attributes": {}
1043
+ }
1044
+ },
1045
+ "total_flos": 1.0487209339532083e+18,
1046
+ "train_batch_size": 1,
1047
+ "trial_name": null,
1048
+ "trial_params": null
1049
+ }
checkpoint-1368/training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d1d135889b53a4fc2dcc2af42081d4ec6fb88ac6da63713e48b39ec06726249a
3
+ size 5624
checkpoint-1539/README.md ADDED
@@ -0,0 +1,209 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ base_model: Salesforce/Llama-xLAM-2-8b-fc-r
3
+ library_name: peft
4
+ pipeline_tag: text-generation
5
+ tags:
6
+ - base_model:adapter:Salesforce/Llama-xLAM-2-8b-fc-r
7
+ - lora
8
+ - sft
9
+ - transformers
10
+ - trl
11
+ ---
12
+
13
+ # Model Card for Model ID
14
+
15
+ <!-- Provide a quick summary of what the model is/does. -->
16
+
17
+
18
+
19
+ ## Model Details
20
+
21
+ ### Model Description
22
+
23
+ <!-- Provide a longer summary of what this model is. -->
24
+
25
+
26
+
27
+ - **Developed by:** [More Information Needed]
28
+ - **Funded by [optional]:** [More Information Needed]
29
+ - **Shared by [optional]:** [More Information Needed]
30
+ - **Model type:** [More Information Needed]
31
+ - **Language(s) (NLP):** [More Information Needed]
32
+ - **License:** [More Information Needed]
33
+ - **Finetuned from model [optional]:** [More Information Needed]
34
+
35
+ ### Model Sources [optional]
36
+
37
+ <!-- Provide the basic links for the model. -->
38
+
39
+ - **Repository:** [More Information Needed]
40
+ - **Paper [optional]:** [More Information Needed]
41
+ - **Demo [optional]:** [More Information Needed]
42
+
43
+ ## Uses
44
+
45
+ <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->
46
+
47
+ ### Direct Use
48
+
49
+ <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->
50
+
51
+ [More Information Needed]
52
+
53
+ ### Downstream Use [optional]
54
+
55
+ <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->
56
+
57
+ [More Information Needed]
58
+
59
+ ### Out-of-Scope Use
60
+
61
+ <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->
62
+
63
+ [More Information Needed]
64
+
65
+ ## Bias, Risks, and Limitations
66
+
67
+ <!-- This section is meant to convey both technical and sociotechnical limitations. -->
68
+
69
+ [More Information Needed]
70
+
71
+ ### Recommendations
72
+
73
+ <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
74
+
75
+ Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
76
+
77
+ ## How to Get Started with the Model
78
+
79
+ Use the code below to get started with the model.
80
+
81
+ [More Information Needed]
82
+
83
+ ## Training Details
84
+
85
+ ### Training Data
86
+
87
+ <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->
88
+
89
+ [More Information Needed]
90
+
91
+ ### Training Procedure
92
+
93
+ <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->
94
+
95
+ #### Preprocessing [optional]
96
+
97
+ [More Information Needed]
98
+
99
+
100
+ #### Training Hyperparameters
101
+
102
+ - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->
103
+
104
+ #### Speeds, Sizes, Times [optional]
105
+
106
+ <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->
107
+
108
+ [More Information Needed]
109
+
110
+ ## Evaluation
111
+
112
+ <!-- This section describes the evaluation protocols and provides the results. -->
113
+
114
+ ### Testing Data, Factors & Metrics
115
+
116
+ #### Testing Data
117
+
118
+ <!-- This should link to a Dataset Card if possible. -->
119
+
120
+ [More Information Needed]
121
+
122
+ #### Factors
123
+
124
+ <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->
125
+
126
+ [More Information Needed]
127
+
128
+ #### Metrics
129
+
130
+ <!-- These are the evaluation metrics being used, ideally with a description of why. -->
131
+
132
+ [More Information Needed]
133
+
134
+ ### Results
135
+
136
+ [More Information Needed]
137
+
138
+ #### Summary
139
+
140
+
141
+
142
+ ## Model Examination [optional]
143
+
144
+ <!-- Relevant interpretability work for the model goes here -->
145
+
146
+ [More Information Needed]
147
+
148
+ ## Environmental Impact
149
+
150
+ <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->
151
+
152
+ Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
153
+
154
+ - **Hardware Type:** [More Information Needed]
155
+ - **Hours used:** [More Information Needed]
156
+ - **Cloud Provider:** [More Information Needed]
157
+ - **Compute Region:** [More Information Needed]
158
+ - **Carbon Emitted:** [More Information Needed]
159
+
160
+ ## Technical Specifications [optional]
161
+
162
+ ### Model Architecture and Objective
163
+
164
+ [More Information Needed]
165
+
166
+ ### Compute Infrastructure
167
+
168
+ [More Information Needed]
169
+
170
+ #### Hardware
171
+
172
+ [More Information Needed]
173
+
174
+ #### Software
175
+
176
+ [More Information Needed]
177
+
178
+ ## Citation [optional]
179
+
180
+ <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->
181
+
182
+ **BibTeX:**
183
+
184
+ [More Information Needed]
185
+
186
+ **APA:**
187
+
188
+ [More Information Needed]
189
+
190
+ ## Glossary [optional]
191
+
192
+ <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->
193
+
194
+ [More Information Needed]
195
+
196
+ ## More Information [optional]
197
+
198
+ [More Information Needed]
199
+
200
+ ## Model Card Authors [optional]
201
+
202
+ [More Information Needed]
203
+
204
+ ## Model Card Contact
205
+
206
+ [More Information Needed]
207
+ ### Framework versions
208
+
209
+ - PEFT 0.17.1
checkpoint-1539/adapter_config.json ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "alpha_pattern": {},
3
+ "auto_mapping": null,
4
+ "base_model_name_or_path": "Salesforce/Llama-xLAM-2-8b-fc-r",
5
+ "bias": "none",
6
+ "corda_config": null,
7
+ "eva_config": null,
8
+ "exclude_modules": null,
9
+ "fan_in_fan_out": false,
10
+ "inference_mode": true,
11
+ "init_lora_weights": true,
12
+ "layer_replication": null,
13
+ "layers_pattern": null,
14
+ "layers_to_transform": null,
15
+ "loftq_config": {},
16
+ "lora_alpha": 32,
17
+ "lora_bias": false,
18
+ "lora_dropout": 0.05,
19
+ "megatron_config": null,
20
+ "megatron_core": "megatron.core",
21
+ "modules_to_save": null,
22
+ "peft_type": "LORA",
23
+ "qalora_group_size": 16,
24
+ "r": 16,
25
+ "rank_pattern": {},
26
+ "revision": null,
27
+ "target_modules": [
28
+ "o_proj",
29
+ "k_proj",
30
+ "q_proj",
31
+ "v_proj"
32
+ ],
33
+ "target_parameters": null,
34
+ "task_type": "CAUSAL_LM",
35
+ "trainable_token_indices": null,
36
+ "use_dora": false,
37
+ "use_qalora": false,
38
+ "use_rslora": false
39
+ }
checkpoint-1539/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c3ded87b859177c17c5908e9bf7dbdd329d029b13e9b2d280936acb506b04ba3
3
+ size 54560368
checkpoint-1539/optimizer.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9049e51e5e313f1d24a7a7cde83c3dc1312b7bec0ea41ac60357a7c10662544a
3
+ size 109267450
checkpoint-1539/rng_state.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:440371463d2c7479129a4219c8223fe7d9419fd64d9894b48b4ebc0a88d5427c
3
+ size 14244
checkpoint-1539/scheduler.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:71622cbba67441c3b0d8f972a3215c80f05dc31a8e0bb9063cbdefe59f705359
3
+ size 1064
checkpoint-1539/special_tokens_map.json ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "additional_special_tokens": [
3
+ "<|eot_id|>",
4
+ "<|eom_id|>"
5
+ ],
6
+ "bos_token": {
7
+ "content": "<|begin_of_text|>",
8
+ "lstrip": false,
9
+ "normalized": false,
10
+ "rstrip": false,
11
+ "single_word": false
12
+ },
13
+ "eos_token": {
14
+ "content": "<|eot_id|>",
15
+ "lstrip": false,
16
+ "normalized": false,
17
+ "rstrip": false,
18
+ "single_word": false
19
+ },
20
+ "pad_token": {
21
+ "content": "<|eot_id|>",
22
+ "lstrip": false,
23
+ "normalized": false,
24
+ "rstrip": false,
25
+ "single_word": false
26
+ }
27
+ }
checkpoint-1539/tokenizer.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
3
+ size 17209920
checkpoint-1539/tokenizer_config.json ADDED
@@ -0,0 +1,2070 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "added_tokens_decoder": {
3
+ "128000": {
4
+ "content": "<|begin_of_text|>",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "128001": {
12
+ "content": "<|end_of_text|>",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "128002": {
20
+ "content": "<|reserved_special_token_0|>",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "128003": {
28
+ "content": "<|reserved_special_token_1|>",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ },
35
+ "128004": {
36
+ "content": "<|finetune_right_pad_id|>",
37
+ "lstrip": false,
38
+ "normalized": false,
39
+ "rstrip": false,
40
+ "single_word": false,
41
+ "special": true
42
+ },
43
+ "128005": {
44
+ "content": "<|reserved_special_token_2|>",
45
+ "lstrip": false,
46
+ "normalized": false,
47
+ "rstrip": false,
48
+ "single_word": false,
49
+ "special": true
50
+ },
51
+ "128006": {
52
+ "content": "<|start_header_id|>",
53
+ "lstrip": false,
54
+ "normalized": false,
55
+ "rstrip": false,
56
+ "single_word": false,
57
+ "special": true
58
+ },
59
+ "128007": {
60
+ "content": "<|end_header_id|>",
61
+ "lstrip": false,
62
+ "normalized": false,
63
+ "rstrip": false,
64
+ "single_word": false,
65
+ "special": true
66
+ },
67
+ "128008": {
68
+ "content": "<|eom_id|>",
69
+ "lstrip": false,
70
+ "normalized": false,
71
+ "rstrip": false,
72
+ "single_word": false,
73
+ "special": true
74
+ },
75
+ "128009": {
76
+ "content": "<|eot_id|>",
77
+ "lstrip": false,
78
+ "normalized": false,
79
+ "rstrip": false,
80
+ "single_word": false,
81
+ "special": true
82
+ },
83
+ "128010": {
84
+ "content": "<|python_tag|>",
85
+ "lstrip": false,
86
+ "normalized": false,
87
+ "rstrip": false,
88
+ "single_word": false,
89
+ "special": true
90
+ },
91
+ "128011": {
92
+ "content": "<|reserved_special_token_3|>",
93
+ "lstrip": false,
94
+ "normalized": false,
95
+ "rstrip": false,
96
+ "single_word": false,
97
+ "special": true
98
+ },
99
+ "128012": {
100
+ "content": "<|reserved_special_token_4|>",
101
+ "lstrip": false,
102
+ "normalized": false,
103
+ "rstrip": false,
104
+ "single_word": false,
105
+ "special": true
106
+ },
107
+ "128013": {
108
+ "content": "<|reserved_special_token_5|>",
109
+ "lstrip": false,
110
+ "normalized": false,
111
+ "rstrip": false,
112
+ "single_word": false,
113
+ "special": true
114
+ },
115
+ "128014": {
116
+ "content": "<|reserved_special_token_6|>",
117
+ "lstrip": false,
118
+ "normalized": false,
119
+ "rstrip": false,
120
+ "single_word": false,
121
+ "special": true
122
+ },
123
+ "128015": {
124
+ "content": "<|reserved_special_token_7|>",
125
+ "lstrip": false,
126
+ "normalized": false,
127
+ "rstrip": false,
128
+ "single_word": false,
129
+ "special": true
130
+ },
131
+ "128016": {
132
+ "content": "<|reserved_special_token_8|>",
133
+ "lstrip": false,
134
+ "normalized": false,
135
+ "rstrip": false,
136
+ "single_word": false,
137
+ "special": true
138
+ },
139
+ "128017": {
140
+ "content": "<|reserved_special_token_9|>",
141
+ "lstrip": false,
142
+ "normalized": false,
143
+ "rstrip": false,
144
+ "single_word": false,
145
+ "special": true
146
+ },
147
+ "128018": {
148
+ "content": "<|reserved_special_token_10|>",
149
+ "lstrip": false,
150
+ "normalized": false,
151
+ "rstrip": false,
152
+ "single_word": false,
153
+ "special": true
154
+ },
155
+ "128019": {
156
+ "content": "<|reserved_special_token_11|>",
157
+ "lstrip": false,
158
+ "normalized": false,
159
+ "rstrip": false,
160
+ "single_word": false,
161
+ "special": true
162
+ },
163
+ "128020": {
164
+ "content": "<|reserved_special_token_12|>",
165
+ "lstrip": false,
166
+ "normalized": false,
167
+ "rstrip": false,
168
+ "single_word": false,
169
+ "special": true
170
+ },
171
+ "128021": {
172
+ "content": "<|reserved_special_token_13|>",
173
+ "lstrip": false,
174
+ "normalized": false,
175
+ "rstrip": false,
176
+ "single_word": false,
177
+ "special": true
178
+ },
179
+ "128022": {
180
+ "content": "<|reserved_special_token_14|>",
181
+ "lstrip": false,
182
+ "normalized": false,
183
+ "rstrip": false,
184
+ "single_word": false,
185
+ "special": true
186
+ },
187
+ "128023": {
188
+ "content": "<|reserved_special_token_15|>",
189
+ "lstrip": false,
190
+ "normalized": false,
191
+ "rstrip": false,
192
+ "single_word": false,
193
+ "special": true
194
+ },
195
+ "128024": {
196
+ "content": "<|reserved_special_token_16|>",
197
+ "lstrip": false,
198
+ "normalized": false,
199
+ "rstrip": false,
200
+ "single_word": false,
201
+ "special": true
202
+ },
203
+ "128025": {
204
+ "content": "<|reserved_special_token_17|>",
205
+ "lstrip": false,
206
+ "normalized": false,
207
+ "rstrip": false,
208
+ "single_word": false,
209
+ "special": true
210
+ },
211
+ "128026": {
212
+ "content": "<|reserved_special_token_18|>",
213
+ "lstrip": false,
214
+ "normalized": false,
215
+ "rstrip": false,
216
+ "single_word": false,
217
+ "special": true
218
+ },
219
+ "128027": {
220
+ "content": "<|reserved_special_token_19|>",
221
+ "lstrip": false,
222
+ "normalized": false,
223
+ "rstrip": false,
224
+ "single_word": false,
225
+ "special": true
226
+ },
227
+ "128028": {
228
+ "content": "<|reserved_special_token_20|>",
229
+ "lstrip": false,
230
+ "normalized": false,
231
+ "rstrip": false,
232
+ "single_word": false,
233
+ "special": true
234
+ },
235
+ "128029": {
236
+ "content": "<|reserved_special_token_21|>",
237
+ "lstrip": false,
238
+ "normalized": false,
239
+ "rstrip": false,
240
+ "single_word": false,
241
+ "special": true
242
+ },
243
+ "128030": {
244
+ "content": "<|reserved_special_token_22|>",
245
+ "lstrip": false,
246
+ "normalized": false,
247
+ "rstrip": false,
248
+ "single_word": false,
249
+ "special": true
250
+ },
251
+ "128031": {
252
+ "content": "<|reserved_special_token_23|>",
253
+ "lstrip": false,
254
+ "normalized": false,
255
+ "rstrip": false,
256
+ "single_word": false,
257
+ "special": true
258
+ },
259
+ "128032": {
260
+ "content": "<|reserved_special_token_24|>",
261
+ "lstrip": false,
262
+ "normalized": false,
263
+ "rstrip": false,
264
+ "single_word": false,
265
+ "special": true
266
+ },
267
+ "128033": {
268
+ "content": "<|reserved_special_token_25|>",
269
+ "lstrip": false,
270
+ "normalized": false,
271
+ "rstrip": false,
272
+ "single_word": false,
273
+ "special": true
274
+ },
275
+ "128034": {
276
+ "content": "<|reserved_special_token_26|>",
277
+ "lstrip": false,
278
+ "normalized": false,
279
+ "rstrip": false,
280
+ "single_word": false,
281
+ "special": true
282
+ },
283
+ "128035": {
284
+ "content": "<|reserved_special_token_27|>",
285
+ "lstrip": false,
286
+ "normalized": false,
287
+ "rstrip": false,
288
+ "single_word": false,
289
+ "special": true
290
+ },
291
+ "128036": {
292
+ "content": "<|reserved_special_token_28|>",
293
+ "lstrip": false,
294
+ "normalized": false,
295
+ "rstrip": false,
296
+ "single_word": false,
297
+ "special": true
298
+ },
299
+ "128037": {
300
+ "content": "<|reserved_special_token_29|>",
301
+ "lstrip": false,
302
+ "normalized": false,
303
+ "rstrip": false,
304
+ "single_word": false,
305
+ "special": true
306
+ },
307
+ "128038": {
308
+ "content": "<|reserved_special_token_30|>",
309
+ "lstrip": false,
310
+ "normalized": false,
311
+ "rstrip": false,
312
+ "single_word": false,
313
+ "special": true
314
+ },
315
+ "128039": {
316
+ "content": "<|reserved_special_token_31|>",
317
+ "lstrip": false,
318
+ "normalized": false,
319
+ "rstrip": false,
320
+ "single_word": false,
321
+ "special": true
322
+ },
323
+ "128040": {
324
+ "content": "<|reserved_special_token_32|>",
325
+ "lstrip": false,
326
+ "normalized": false,
327
+ "rstrip": false,
328
+ "single_word": false,
329
+ "special": true
330
+ },
331
+ "128041": {
332
+ "content": "<|reserved_special_token_33|>",
333
+ "lstrip": false,
334
+ "normalized": false,
335
+ "rstrip": false,
336
+ "single_word": false,
337
+ "special": true
338
+ },
339
+ "128042": {
340
+ "content": "<|reserved_special_token_34|>",
341
+ "lstrip": false,
342
+ "normalized": false,
343
+ "rstrip": false,
344
+ "single_word": false,
345
+ "special": true
346
+ },
347
+ "128043": {
348
+ "content": "<|reserved_special_token_35|>",
349
+ "lstrip": false,
350
+ "normalized": false,
351
+ "rstrip": false,
352
+ "single_word": false,
353
+ "special": true
354
+ },
355
+ "128044": {
356
+ "content": "<|reserved_special_token_36|>",
357
+ "lstrip": false,
358
+ "normalized": false,
359
+ "rstrip": false,
360
+ "single_word": false,
361
+ "special": true
362
+ },
363
+ "128045": {
364
+ "content": "<|reserved_special_token_37|>",
365
+ "lstrip": false,
366
+ "normalized": false,
367
+ "rstrip": false,
368
+ "single_word": false,
369
+ "special": true
370
+ },
371
+ "128046": {
372
+ "content": "<|reserved_special_token_38|>",
373
+ "lstrip": false,
374
+ "normalized": false,
375
+ "rstrip": false,
376
+ "single_word": false,
377
+ "special": true
378
+ },
379
+ "128047": {
380
+ "content": "<|reserved_special_token_39|>",
381
+ "lstrip": false,
382
+ "normalized": false,
383
+ "rstrip": false,
384
+ "single_word": false,
385
+ "special": true
386
+ },
387
+ "128048": {
388
+ "content": "<|reserved_special_token_40|>",
389
+ "lstrip": false,
390
+ "normalized": false,
391
+ "rstrip": false,
392
+ "single_word": false,
393
+ "special": true
394
+ },
395
+ "128049": {
396
+ "content": "<|reserved_special_token_41|>",
397
+ "lstrip": false,
398
+ "normalized": false,
399
+ "rstrip": false,
400
+ "single_word": false,
401
+ "special": true
402
+ },
403
+ "128050": {
404
+ "content": "<|reserved_special_token_42|>",
405
+ "lstrip": false,
406
+ "normalized": false,
407
+ "rstrip": false,
408
+ "single_word": false,
409
+ "special": true
410
+ },
411
+ "128051": {
412
+ "content": "<|reserved_special_token_43|>",
413
+ "lstrip": false,
414
+ "normalized": false,
415
+ "rstrip": false,
416
+ "single_word": false,
417
+ "special": true
418
+ },
419
+ "128052": {
420
+ "content": "<|reserved_special_token_44|>",
421
+ "lstrip": false,
422
+ "normalized": false,
423
+ "rstrip": false,
424
+ "single_word": false,
425
+ "special": true
426
+ },
427
+ "128053": {
428
+ "content": "<|reserved_special_token_45|>",
429
+ "lstrip": false,
430
+ "normalized": false,
431
+ "rstrip": false,
432
+ "single_word": false,
433
+ "special": true
434
+ },
435
+ "128054": {
436
+ "content": "<|reserved_special_token_46|>",
437
+ "lstrip": false,
438
+ "normalized": false,
439
+ "rstrip": false,
440
+ "single_word": false,
441
+ "special": true
442
+ },
443
+ "128055": {
444
+ "content": "<|reserved_special_token_47|>",
445
+ "lstrip": false,
446
+ "normalized": false,
447
+ "rstrip": false,
448
+ "single_word": false,
449
+ "special": true
450
+ },
451
+ "128056": {
452
+ "content": "<|reserved_special_token_48|>",
453
+ "lstrip": false,
454
+ "normalized": false,
455
+ "rstrip": false,
456
+ "single_word": false,
457
+ "special": true
458
+ },
459
+ "128057": {
460
+ "content": "<|reserved_special_token_49|>",
461
+ "lstrip": false,
462
+ "normalized": false,
463
+ "rstrip": false,
464
+ "single_word": false,
465
+ "special": true
466
+ },
467
+ "128058": {
468
+ "content": "<|reserved_special_token_50|>",
469
+ "lstrip": false,
470
+ "normalized": false,
471
+ "rstrip": false,
472
+ "single_word": false,
473
+ "special": true
474
+ },
475
+ "128059": {
476
+ "content": "<|reserved_special_token_51|>",
477
+ "lstrip": false,
478
+ "normalized": false,
479
+ "rstrip": false,
480
+ "single_word": false,
481
+ "special": true
482
+ },
483
+ "128060": {
484
+ "content": "<|reserved_special_token_52|>",
485
+ "lstrip": false,
486
+ "normalized": false,
487
+ "rstrip": false,
488
+ "single_word": false,
489
+ "special": true
490
+ },
491
+ "128061": {
492
+ "content": "<|reserved_special_token_53|>",
493
+ "lstrip": false,
494
+ "normalized": false,
495
+ "rstrip": false,
496
+ "single_word": false,
497
+ "special": true
498
+ },
499
+ "128062": {
500
+ "content": "<|reserved_special_token_54|>",
501
+ "lstrip": false,
502
+ "normalized": false,
503
+ "rstrip": false,
504
+ "single_word": false,
505
+ "special": true
506
+ },
507
+ "128063": {
508
+ "content": "<|reserved_special_token_55|>",
509
+ "lstrip": false,
510
+ "normalized": false,
511
+ "rstrip": false,
512
+ "single_word": false,
513
+ "special": true
514
+ },
515
+ "128064": {
516
+ "content": "<|reserved_special_token_56|>",
517
+ "lstrip": false,
518
+ "normalized": false,
519
+ "rstrip": false,
520
+ "single_word": false,
521
+ "special": true
522
+ },
523
+ "128065": {
524
+ "content": "<|reserved_special_token_57|>",
525
+ "lstrip": false,
526
+ "normalized": false,
527
+ "rstrip": false,
528
+ "single_word": false,
529
+ "special": true
530
+ },
531
+ "128066": {
532
+ "content": "<|reserved_special_token_58|>",
533
+ "lstrip": false,
534
+ "normalized": false,
535
+ "rstrip": false,
536
+ "single_word": false,
537
+ "special": true
538
+ },
539
+ "128067": {
540
+ "content": "<|reserved_special_token_59|>",
541
+ "lstrip": false,
542
+ "normalized": false,
543
+ "rstrip": false,
544
+ "single_word": false,
545
+ "special": true
546
+ },
547
+ "128068": {
548
+ "content": "<|reserved_special_token_60|>",
549
+ "lstrip": false,
550
+ "normalized": false,
551
+ "rstrip": false,
552
+ "single_word": false,
553
+ "special": true
554
+ },
555
+ "128069": {
556
+ "content": "<|reserved_special_token_61|>",
557
+ "lstrip": false,
558
+ "normalized": false,
559
+ "rstrip": false,
560
+ "single_word": false,
561
+ "special": true
562
+ },
563
+ "128070": {
564
+ "content": "<|reserved_special_token_62|>",
565
+ "lstrip": false,
566
+ "normalized": false,
567
+ "rstrip": false,
568
+ "single_word": false,
569
+ "special": true
570
+ },
571
+ "128071": {
572
+ "content": "<|reserved_special_token_63|>",
573
+ "lstrip": false,
574
+ "normalized": false,
575
+ "rstrip": false,
576
+ "single_word": false,
577
+ "special": true
578
+ },
579
+ "128072": {
580
+ "content": "<|reserved_special_token_64|>",
581
+ "lstrip": false,
582
+ "normalized": false,
583
+ "rstrip": false,
584
+ "single_word": false,
585
+ "special": true
586
+ },
587
+ "128073": {
588
+ "content": "<|reserved_special_token_65|>",
589
+ "lstrip": false,
590
+ "normalized": false,
591
+ "rstrip": false,
592
+ "single_word": false,
593
+ "special": true
594
+ },
595
+ "128074": {
596
+ "content": "<|reserved_special_token_66|>",
597
+ "lstrip": false,
598
+ "normalized": false,
599
+ "rstrip": false,
600
+ "single_word": false,
601
+ "special": true
602
+ },
603
+ "128075": {
604
+ "content": "<|reserved_special_token_67|>",
605
+ "lstrip": false,
606
+ "normalized": false,
607
+ "rstrip": false,
608
+ "single_word": false,
609
+ "special": true
610
+ },
611
+ "128076": {
612
+ "content": "<|reserved_special_token_68|>",
613
+ "lstrip": false,
614
+ "normalized": false,
615
+ "rstrip": false,
616
+ "single_word": false,
617
+ "special": true
618
+ },
619
+ "128077": {
620
+ "content": "<|reserved_special_token_69|>",
621
+ "lstrip": false,
622
+ "normalized": false,
623
+ "rstrip": false,
624
+ "single_word": false,
625
+ "special": true
626
+ },
627
+ "128078": {
628
+ "content": "<|reserved_special_token_70|>",
629
+ "lstrip": false,
630
+ "normalized": false,
631
+ "rstrip": false,
632
+ "single_word": false,
633
+ "special": true
634
+ },
635
+ "128079": {
636
+ "content": "<|reserved_special_token_71|>",
637
+ "lstrip": false,
638
+ "normalized": false,
639
+ "rstrip": false,
640
+ "single_word": false,
641
+ "special": true
642
+ },
643
+ "128080": {
644
+ "content": "<|reserved_special_token_72|>",
645
+ "lstrip": false,
646
+ "normalized": false,
647
+ "rstrip": false,
648
+ "single_word": false,
649
+ "special": true
650
+ },
651
+ "128081": {
652
+ "content": "<|reserved_special_token_73|>",
653
+ "lstrip": false,
654
+ "normalized": false,
655
+ "rstrip": false,
656
+ "single_word": false,
657
+ "special": true
658
+ },
659
+ "128082": {
660
+ "content": "<|reserved_special_token_74|>",
661
+ "lstrip": false,
662
+ "normalized": false,
663
+ "rstrip": false,
664
+ "single_word": false,
665
+ "special": true
666
+ },
667
+ "128083": {
668
+ "content": "<|reserved_special_token_75|>",
669
+ "lstrip": false,
670
+ "normalized": false,
671
+ "rstrip": false,
672
+ "single_word": false,
673
+ "special": true
674
+ },
675
+ "128084": {
676
+ "content": "<|reserved_special_token_76|>",
677
+ "lstrip": false,
678
+ "normalized": false,
679
+ "rstrip": false,
680
+ "single_word": false,
681
+ "special": true
682
+ },
683
+ "128085": {
684
+ "content": "<|reserved_special_token_77|>",
685
+ "lstrip": false,
686
+ "normalized": false,
687
+ "rstrip": false,
688
+ "single_word": false,
689
+ "special": true
690
+ },
691
+ "128086": {
692
+ "content": "<|reserved_special_token_78|>",
693
+ "lstrip": false,
694
+ "normalized": false,
695
+ "rstrip": false,
696
+ "single_word": false,
697
+ "special": true
698
+ },
699
+ "128087": {
700
+ "content": "<|reserved_special_token_79|>",
701
+ "lstrip": false,
702
+ "normalized": false,
703
+ "rstrip": false,
704
+ "single_word": false,
705
+ "special": true
706
+ },
707
+ "128088": {
708
+ "content": "<|reserved_special_token_80|>",
709
+ "lstrip": false,
710
+ "normalized": false,
711
+ "rstrip": false,
712
+ "single_word": false,
713
+ "special": true
714
+ },
715
+ "128089": {
716
+ "content": "<|reserved_special_token_81|>",
717
+ "lstrip": false,
718
+ "normalized": false,
719
+ "rstrip": false,
720
+ "single_word": false,
721
+ "special": true
722
+ },
723
+ "128090": {
724
+ "content": "<|reserved_special_token_82|>",
725
+ "lstrip": false,
726
+ "normalized": false,
727
+ "rstrip": false,
728
+ "single_word": false,
729
+ "special": true
730
+ },
731
+ "128091": {
732
+ "content": "<|reserved_special_token_83|>",
733
+ "lstrip": false,
734
+ "normalized": false,
735
+ "rstrip": false,
736
+ "single_word": false,
737
+ "special": true
738
+ },
739
+ "128092": {
740
+ "content": "<|reserved_special_token_84|>",
741
+ "lstrip": false,
742
+ "normalized": false,
743
+ "rstrip": false,
744
+ "single_word": false,
745
+ "special": true
746
+ },
747
+ "128093": {
748
+ "content": "<|reserved_special_token_85|>",
749
+ "lstrip": false,
750
+ "normalized": false,
751
+ "rstrip": false,
752
+ "single_word": false,
753
+ "special": true
754
+ },
755
+ "128094": {
756
+ "content": "<|reserved_special_token_86|>",
757
+ "lstrip": false,
758
+ "normalized": false,
759
+ "rstrip": false,
760
+ "single_word": false,
761
+ "special": true
762
+ },
763
+ "128095": {
764
+ "content": "<|reserved_special_token_87|>",
765
+ "lstrip": false,
766
+ "normalized": false,
767
+ "rstrip": false,
768
+ "single_word": false,
769
+ "special": true
770
+ },
771
+ "128096": {
772
+ "content": "<|reserved_special_token_88|>",
773
+ "lstrip": false,
774
+ "normalized": false,
775
+ "rstrip": false,
776
+ "single_word": false,
777
+ "special": true
778
+ },
779
+ "128097": {
780
+ "content": "<|reserved_special_token_89|>",
781
+ "lstrip": false,
782
+ "normalized": false,
783
+ "rstrip": false,
784
+ "single_word": false,
785
+ "special": true
786
+ },
787
+ "128098": {
788
+ "content": "<|reserved_special_token_90|>",
789
+ "lstrip": false,
790
+ "normalized": false,
791
+ "rstrip": false,
792
+ "single_word": false,
793
+ "special": true
794
+ },
795
+ "128099": {
796
+ "content": "<|reserved_special_token_91|>",
797
+ "lstrip": false,
798
+ "normalized": false,
799
+ "rstrip": false,
800
+ "single_word": false,
801
+ "special": true
802
+ },
803
+ "128100": {
804
+ "content": "<|reserved_special_token_92|>",
805
+ "lstrip": false,
806
+ "normalized": false,
807
+ "rstrip": false,
808
+ "single_word": false,
809
+ "special": true
810
+ },
811
+ "128101": {
812
+ "content": "<|reserved_special_token_93|>",
813
+ "lstrip": false,
814
+ "normalized": false,
815
+ "rstrip": false,
816
+ "single_word": false,
817
+ "special": true
818
+ },
819
+ "128102": {
820
+ "content": "<|reserved_special_token_94|>",
821
+ "lstrip": false,
822
+ "normalized": false,
823
+ "rstrip": false,
824
+ "single_word": false,
825
+ "special": true
826
+ },
827
+ "128103": {
828
+ "content": "<|reserved_special_token_95|>",
829
+ "lstrip": false,
830
+ "normalized": false,
831
+ "rstrip": false,
832
+ "single_word": false,
833
+ "special": true
834
+ },
835
+ "128104": {
836
+ "content": "<|reserved_special_token_96|>",
837
+ "lstrip": false,
838
+ "normalized": false,
839
+ "rstrip": false,
840
+ "single_word": false,
841
+ "special": true
842
+ },
843
+ "128105": {
844
+ "content": "<|reserved_special_token_97|>",
845
+ "lstrip": false,
846
+ "normalized": false,
847
+ "rstrip": false,
848
+ "single_word": false,
849
+ "special": true
850
+ },
851
+ "128106": {
852
+ "content": "<|reserved_special_token_98|>",
853
+ "lstrip": false,
854
+ "normalized": false,
855
+ "rstrip": false,
856
+ "single_word": false,
857
+ "special": true
858
+ },
859
+ "128107": {
860
+ "content": "<|reserved_special_token_99|>",
861
+ "lstrip": false,
862
+ "normalized": false,
863
+ "rstrip": false,
864
+ "single_word": false,
865
+ "special": true
866
+ },
867
+ "128108": {
868
+ "content": "<|reserved_special_token_100|>",
869
+ "lstrip": false,
870
+ "normalized": false,
871
+ "rstrip": false,
872
+ "single_word": false,
873
+ "special": true
874
+ },
875
+ "128109": {
876
+ "content": "<|reserved_special_token_101|>",
877
+ "lstrip": false,
878
+ "normalized": false,
879
+ "rstrip": false,
880
+ "single_word": false,
881
+ "special": true
882
+ },
883
+ "128110": {
884
+ "content": "<|reserved_special_token_102|>",
885
+ "lstrip": false,
886
+ "normalized": false,
887
+ "rstrip": false,
888
+ "single_word": false,
889
+ "special": true
890
+ },
891
+ "128111": {
892
+ "content": "<|reserved_special_token_103|>",
893
+ "lstrip": false,
894
+ "normalized": false,
895
+ "rstrip": false,
896
+ "single_word": false,
897
+ "special": true
898
+ },
899
+ "128112": {
900
+ "content": "<|reserved_special_token_104|>",
901
+ "lstrip": false,
902
+ "normalized": false,
903
+ "rstrip": false,
904
+ "single_word": false,
905
+ "special": true
906
+ },
907
+ "128113": {
908
+ "content": "<|reserved_special_token_105|>",
909
+ "lstrip": false,
910
+ "normalized": false,
911
+ "rstrip": false,
912
+ "single_word": false,
913
+ "special": true
914
+ },
915
+ "128114": {
916
+ "content": "<|reserved_special_token_106|>",
917
+ "lstrip": false,
918
+ "normalized": false,
919
+ "rstrip": false,
920
+ "single_word": false,
921
+ "special": true
922
+ },
923
+ "128115": {
924
+ "content": "<|reserved_special_token_107|>",
925
+ "lstrip": false,
926
+ "normalized": false,
927
+ "rstrip": false,
928
+ "single_word": false,
929
+ "special": true
930
+ },
931
+ "128116": {
932
+ "content": "<|reserved_special_token_108|>",
933
+ "lstrip": false,
934
+ "normalized": false,
935
+ "rstrip": false,
936
+ "single_word": false,
937
+ "special": true
938
+ },
939
+ "128117": {
940
+ "content": "<|reserved_special_token_109|>",
941
+ "lstrip": false,
942
+ "normalized": false,
943
+ "rstrip": false,
944
+ "single_word": false,
945
+ "special": true
946
+ },
947
+ "128118": {
948
+ "content": "<|reserved_special_token_110|>",
949
+ "lstrip": false,
950
+ "normalized": false,
951
+ "rstrip": false,
952
+ "single_word": false,
953
+ "special": true
954
+ },
955
+ "128119": {
956
+ "content": "<|reserved_special_token_111|>",
957
+ "lstrip": false,
958
+ "normalized": false,
959
+ "rstrip": false,
960
+ "single_word": false,
961
+ "special": true
962
+ },
963
+ "128120": {
964
+ "content": "<|reserved_special_token_112|>",
965
+ "lstrip": false,
966
+ "normalized": false,
967
+ "rstrip": false,
968
+ "single_word": false,
969
+ "special": true
970
+ },
971
+ "128121": {
972
+ "content": "<|reserved_special_token_113|>",
973
+ "lstrip": false,
974
+ "normalized": false,
975
+ "rstrip": false,
976
+ "single_word": false,
977
+ "special": true
978
+ },
979
+ "128122": {
980
+ "content": "<|reserved_special_token_114|>",
981
+ "lstrip": false,
982
+ "normalized": false,
983
+ "rstrip": false,
984
+ "single_word": false,
985
+ "special": true
986
+ },
987
+ "128123": {
988
+ "content": "<|reserved_special_token_115|>",
989
+ "lstrip": false,
990
+ "normalized": false,
991
+ "rstrip": false,
992
+ "single_word": false,
993
+ "special": true
994
+ },
995
+ "128124": {
996
+ "content": "<|reserved_special_token_116|>",
997
+ "lstrip": false,
998
+ "normalized": false,
999
+ "rstrip": false,
1000
+ "single_word": false,
1001
+ "special": true
1002
+ },
1003
+ "128125": {
1004
+ "content": "<|reserved_special_token_117|>",
1005
+ "lstrip": false,
1006
+ "normalized": false,
1007
+ "rstrip": false,
1008
+ "single_word": false,
1009
+ "special": true
1010
+ },
1011
+ "128126": {
1012
+ "content": "<|reserved_special_token_118|>",
1013
+ "lstrip": false,
1014
+ "normalized": false,
1015
+ "rstrip": false,
1016
+ "single_word": false,
1017
+ "special": true
1018
+ },
1019
+ "128127": {
1020
+ "content": "<|reserved_special_token_119|>",
1021
+ "lstrip": false,
1022
+ "normalized": false,
1023
+ "rstrip": false,
1024
+ "single_word": false,
1025
+ "special": true
1026
+ },
1027
+ "128128": {
1028
+ "content": "<|reserved_special_token_120|>",
1029
+ "lstrip": false,
1030
+ "normalized": false,
1031
+ "rstrip": false,
1032
+ "single_word": false,
1033
+ "special": true
1034
+ },
1035
+ "128129": {
1036
+ "content": "<|reserved_special_token_121|>",
1037
+ "lstrip": false,
1038
+ "normalized": false,
1039
+ "rstrip": false,
1040
+ "single_word": false,
1041
+ "special": true
1042
+ },
1043
+ "128130": {
1044
+ "content": "<|reserved_special_token_122|>",
1045
+ "lstrip": false,
1046
+ "normalized": false,
1047
+ "rstrip": false,
1048
+ "single_word": false,
1049
+ "special": true
1050
+ },
1051
+ "128131": {
1052
+ "content": "<|reserved_special_token_123|>",
1053
+ "lstrip": false,
1054
+ "normalized": false,
1055
+ "rstrip": false,
1056
+ "single_word": false,
1057
+ "special": true
1058
+ },
1059
+ "128132": {
1060
+ "content": "<|reserved_special_token_124|>",
1061
+ "lstrip": false,
1062
+ "normalized": false,
1063
+ "rstrip": false,
1064
+ "single_word": false,
1065
+ "special": true
1066
+ },
1067
+ "128133": {
1068
+ "content": "<|reserved_special_token_125|>",
1069
+ "lstrip": false,
1070
+ "normalized": false,
1071
+ "rstrip": false,
1072
+ "single_word": false,
1073
+ "special": true
1074
+ },
1075
+ "128134": {
1076
+ "content": "<|reserved_special_token_126|>",
1077
+ "lstrip": false,
1078
+ "normalized": false,
1079
+ "rstrip": false,
1080
+ "single_word": false,
1081
+ "special": true
1082
+ },
1083
+ "128135": {
1084
+ "content": "<|reserved_special_token_127|>",
1085
+ "lstrip": false,
1086
+ "normalized": false,
1087
+ "rstrip": false,
1088
+ "single_word": false,
1089
+ "special": true
1090
+ },
1091
+ "128136": {
1092
+ "content": "<|reserved_special_token_128|>",
1093
+ "lstrip": false,
1094
+ "normalized": false,
1095
+ "rstrip": false,
1096
+ "single_word": false,
1097
+ "special": true
1098
+ },
1099
+ "128137": {
1100
+ "content": "<|reserved_special_token_129|>",
1101
+ "lstrip": false,
1102
+ "normalized": false,
1103
+ "rstrip": false,
1104
+ "single_word": false,
1105
+ "special": true
1106
+ },
1107
+ "128138": {
1108
+ "content": "<|reserved_special_token_130|>",
1109
+ "lstrip": false,
1110
+ "normalized": false,
1111
+ "rstrip": false,
1112
+ "single_word": false,
1113
+ "special": true
1114
+ },
1115
+ "128139": {
1116
+ "content": "<|reserved_special_token_131|>",
1117
+ "lstrip": false,
1118
+ "normalized": false,
1119
+ "rstrip": false,
1120
+ "single_word": false,
1121
+ "special": true
1122
+ },
1123
+ "128140": {
1124
+ "content": "<|reserved_special_token_132|>",
1125
+ "lstrip": false,
1126
+ "normalized": false,
1127
+ "rstrip": false,
1128
+ "single_word": false,
1129
+ "special": true
1130
+ },
1131
+ "128141": {
1132
+ "content": "<|reserved_special_token_133|>",
1133
+ "lstrip": false,
1134
+ "normalized": false,
1135
+ "rstrip": false,
1136
+ "single_word": false,
1137
+ "special": true
1138
+ },
1139
+ "128142": {
1140
+ "content": "<|reserved_special_token_134|>",
1141
+ "lstrip": false,
1142
+ "normalized": false,
1143
+ "rstrip": false,
1144
+ "single_word": false,
1145
+ "special": true
1146
+ },
1147
+ "128143": {
1148
+ "content": "<|reserved_special_token_135|>",
1149
+ "lstrip": false,
1150
+ "normalized": false,
1151
+ "rstrip": false,
1152
+ "single_word": false,
1153
+ "special": true
1154
+ },
1155
+ "128144": {
1156
+ "content": "<|reserved_special_token_136|>",
1157
+ "lstrip": false,
1158
+ "normalized": false,
1159
+ "rstrip": false,
1160
+ "single_word": false,
1161
+ "special": true
1162
+ },
1163
+ "128145": {
1164
+ "content": "<|reserved_special_token_137|>",
1165
+ "lstrip": false,
1166
+ "normalized": false,
1167
+ "rstrip": false,
1168
+ "single_word": false,
1169
+ "special": true
1170
+ },
1171
+ "128146": {
1172
+ "content": "<|reserved_special_token_138|>",
1173
+ "lstrip": false,
1174
+ "normalized": false,
1175
+ "rstrip": false,
1176
+ "single_word": false,
1177
+ "special": true
1178
+ },
1179
+ "128147": {
1180
+ "content": "<|reserved_special_token_139|>",
1181
+ "lstrip": false,
1182
+ "normalized": false,
1183
+ "rstrip": false,
1184
+ "single_word": false,
1185
+ "special": true
1186
+ },
1187
+ "128148": {
1188
+ "content": "<|reserved_special_token_140|>",
1189
+ "lstrip": false,
1190
+ "normalized": false,
1191
+ "rstrip": false,
1192
+ "single_word": false,
1193
+ "special": true
1194
+ },
1195
+ "128149": {
1196
+ "content": "<|reserved_special_token_141|>",
1197
+ "lstrip": false,
1198
+ "normalized": false,
1199
+ "rstrip": false,
1200
+ "single_word": false,
1201
+ "special": true
1202
+ },
1203
+ "128150": {
1204
+ "content": "<|reserved_special_token_142|>",
1205
+ "lstrip": false,
1206
+ "normalized": false,
1207
+ "rstrip": false,
1208
+ "single_word": false,
1209
+ "special": true
1210
+ },
1211
+ "128151": {
1212
+ "content": "<|reserved_special_token_143|>",
1213
+ "lstrip": false,
1214
+ "normalized": false,
1215
+ "rstrip": false,
1216
+ "single_word": false,
1217
+ "special": true
1218
+ },
1219
+ "128152": {
1220
+ "content": "<|reserved_special_token_144|>",
1221
+ "lstrip": false,
1222
+ "normalized": false,
1223
+ "rstrip": false,
1224
+ "single_word": false,
1225
+ "special": true
1226
+ },
1227
+ "128153": {
1228
+ "content": "<|reserved_special_token_145|>",
1229
+ "lstrip": false,
1230
+ "normalized": false,
1231
+ "rstrip": false,
1232
+ "single_word": false,
1233
+ "special": true
1234
+ },
1235
+ "128154": {
1236
+ "content": "<|reserved_special_token_146|>",
1237
+ "lstrip": false,
1238
+ "normalized": false,
1239
+ "rstrip": false,
1240
+ "single_word": false,
1241
+ "special": true
1242
+ },
1243
+ "128155": {
1244
+ "content": "<|reserved_special_token_147|>",
1245
+ "lstrip": false,
1246
+ "normalized": false,
1247
+ "rstrip": false,
1248
+ "single_word": false,
1249
+ "special": true
1250
+ },
1251
+ "128156": {
1252
+ "content": "<|reserved_special_token_148|>",
1253
+ "lstrip": false,
1254
+ "normalized": false,
1255
+ "rstrip": false,
1256
+ "single_word": false,
1257
+ "special": true
1258
+ },
1259
+ "128157": {
1260
+ "content": "<|reserved_special_token_149|>",
1261
+ "lstrip": false,
1262
+ "normalized": false,
1263
+ "rstrip": false,
1264
+ "single_word": false,
1265
+ "special": true
1266
+ },
1267
+ "128158": {
1268
+ "content": "<|reserved_special_token_150|>",
1269
+ "lstrip": false,
1270
+ "normalized": false,
1271
+ "rstrip": false,
1272
+ "single_word": false,
1273
+ "special": true
1274
+ },
1275
+ "128159": {
1276
+ "content": "<|reserved_special_token_151|>",
1277
+ "lstrip": false,
1278
+ "normalized": false,
1279
+ "rstrip": false,
1280
+ "single_word": false,
1281
+ "special": true
1282
+ },
1283
+ "128160": {
1284
+ "content": "<|reserved_special_token_152|>",
1285
+ "lstrip": false,
1286
+ "normalized": false,
1287
+ "rstrip": false,
1288
+ "single_word": false,
1289
+ "special": true
1290
+ },
1291
+ "128161": {
1292
+ "content": "<|reserved_special_token_153|>",
1293
+ "lstrip": false,
1294
+ "normalized": false,
1295
+ "rstrip": false,
1296
+ "single_word": false,
1297
+ "special": true
1298
+ },
1299
+ "128162": {
1300
+ "content": "<|reserved_special_token_154|>",
1301
+ "lstrip": false,
1302
+ "normalized": false,
1303
+ "rstrip": false,
1304
+ "single_word": false,
1305
+ "special": true
1306
+ },
1307
+ "128163": {
1308
+ "content": "<|reserved_special_token_155|>",
1309
+ "lstrip": false,
1310
+ "normalized": false,
1311
+ "rstrip": false,
1312
+ "single_word": false,
1313
+ "special": true
1314
+ },
1315
+ "128164": {
1316
+ "content": "<|reserved_special_token_156|>",
1317
+ "lstrip": false,
1318
+ "normalized": false,
1319
+ "rstrip": false,
1320
+ "single_word": false,
1321
+ "special": true
1322
+ },
1323
+ "128165": {
1324
+ "content": "<|reserved_special_token_157|>",
1325
+ "lstrip": false,
1326
+ "normalized": false,
1327
+ "rstrip": false,
1328
+ "single_word": false,
1329
+ "special": true
1330
+ },
1331
+ "128166": {
1332
+ "content": "<|reserved_special_token_158|>",
1333
+ "lstrip": false,
1334
+ "normalized": false,
1335
+ "rstrip": false,
1336
+ "single_word": false,
1337
+ "special": true
1338
+ },
1339
+ "128167": {
1340
+ "content": "<|reserved_special_token_159|>",
1341
+ "lstrip": false,
1342
+ "normalized": false,
1343
+ "rstrip": false,
1344
+ "single_word": false,
1345
+ "special": true
1346
+ },
1347
+ "128168": {
1348
+ "content": "<|reserved_special_token_160|>",
1349
+ "lstrip": false,
1350
+ "normalized": false,
1351
+ "rstrip": false,
1352
+ "single_word": false,
1353
+ "special": true
1354
+ },
1355
+ "128169": {
1356
+ "content": "<|reserved_special_token_161|>",
1357
+ "lstrip": false,
1358
+ "normalized": false,
1359
+ "rstrip": false,
1360
+ "single_word": false,
1361
+ "special": true
1362
+ },
1363
+ "128170": {
1364
+ "content": "<|reserved_special_token_162|>",
1365
+ "lstrip": false,
1366
+ "normalized": false,
1367
+ "rstrip": false,
1368
+ "single_word": false,
1369
+ "special": true
1370
+ },
1371
+ "128171": {
1372
+ "content": "<|reserved_special_token_163|>",
1373
+ "lstrip": false,
1374
+ "normalized": false,
1375
+ "rstrip": false,
1376
+ "single_word": false,
1377
+ "special": true
1378
+ },
1379
+ "128172": {
1380
+ "content": "<|reserved_special_token_164|>",
1381
+ "lstrip": false,
1382
+ "normalized": false,
1383
+ "rstrip": false,
1384
+ "single_word": false,
1385
+ "special": true
1386
+ },
1387
+ "128173": {
1388
+ "content": "<|reserved_special_token_165|>",
1389
+ "lstrip": false,
1390
+ "normalized": false,
1391
+ "rstrip": false,
1392
+ "single_word": false,
1393
+ "special": true
1394
+ },
1395
+ "128174": {
1396
+ "content": "<|reserved_special_token_166|>",
1397
+ "lstrip": false,
1398
+ "normalized": false,
1399
+ "rstrip": false,
1400
+ "single_word": false,
1401
+ "special": true
1402
+ },
1403
+ "128175": {
1404
+ "content": "<|reserved_special_token_167|>",
1405
+ "lstrip": false,
1406
+ "normalized": false,
1407
+ "rstrip": false,
1408
+ "single_word": false,
1409
+ "special": true
1410
+ },
1411
+ "128176": {
1412
+ "content": "<|reserved_special_token_168|>",
1413
+ "lstrip": false,
1414
+ "normalized": false,
1415
+ "rstrip": false,
1416
+ "single_word": false,
1417
+ "special": true
1418
+ },
1419
+ "128177": {
1420
+ "content": "<|reserved_special_token_169|>",
1421
+ "lstrip": false,
1422
+ "normalized": false,
1423
+ "rstrip": false,
1424
+ "single_word": false,
1425
+ "special": true
1426
+ },
1427
+ "128178": {
1428
+ "content": "<|reserved_special_token_170|>",
1429
+ "lstrip": false,
1430
+ "normalized": false,
1431
+ "rstrip": false,
1432
+ "single_word": false,
1433
+ "special": true
1434
+ },
1435
+ "128179": {
1436
+ "content": "<|reserved_special_token_171|>",
1437
+ "lstrip": false,
1438
+ "normalized": false,
1439
+ "rstrip": false,
1440
+ "single_word": false,
1441
+ "special": true
1442
+ },
1443
+ "128180": {
1444
+ "content": "<|reserved_special_token_172|>",
1445
+ "lstrip": false,
1446
+ "normalized": false,
1447
+ "rstrip": false,
1448
+ "single_word": false,
1449
+ "special": true
1450
+ },
1451
+ "128181": {
1452
+ "content": "<|reserved_special_token_173|>",
1453
+ "lstrip": false,
1454
+ "normalized": false,
1455
+ "rstrip": false,
1456
+ "single_word": false,
1457
+ "special": true
1458
+ },
1459
+ "128182": {
1460
+ "content": "<|reserved_special_token_174|>",
1461
+ "lstrip": false,
1462
+ "normalized": false,
1463
+ "rstrip": false,
1464
+ "single_word": false,
1465
+ "special": true
1466
+ },
1467
+ "128183": {
1468
+ "content": "<|reserved_special_token_175|>",
1469
+ "lstrip": false,
1470
+ "normalized": false,
1471
+ "rstrip": false,
1472
+ "single_word": false,
1473
+ "special": true
1474
+ },
1475
+ "128184": {
1476
+ "content": "<|reserved_special_token_176|>",
1477
+ "lstrip": false,
1478
+ "normalized": false,
1479
+ "rstrip": false,
1480
+ "single_word": false,
1481
+ "special": true
1482
+ },
1483
+ "128185": {
1484
+ "content": "<|reserved_special_token_177|>",
1485
+ "lstrip": false,
1486
+ "normalized": false,
1487
+ "rstrip": false,
1488
+ "single_word": false,
1489
+ "special": true
1490
+ },
1491
+ "128186": {
1492
+ "content": "<|reserved_special_token_178|>",
1493
+ "lstrip": false,
1494
+ "normalized": false,
1495
+ "rstrip": false,
1496
+ "single_word": false,
1497
+ "special": true
1498
+ },
1499
+ "128187": {
1500
+ "content": "<|reserved_special_token_179|>",
1501
+ "lstrip": false,
1502
+ "normalized": false,
1503
+ "rstrip": false,
1504
+ "single_word": false,
1505
+ "special": true
1506
+ },
1507
+ "128188": {
1508
+ "content": "<|reserved_special_token_180|>",
1509
+ "lstrip": false,
1510
+ "normalized": false,
1511
+ "rstrip": false,
1512
+ "single_word": false,
1513
+ "special": true
1514
+ },
1515
+ "128189": {
1516
+ "content": "<|reserved_special_token_181|>",
1517
+ "lstrip": false,
1518
+ "normalized": false,
1519
+ "rstrip": false,
1520
+ "single_word": false,
1521
+ "special": true
1522
+ },
1523
+ "128190": {
1524
+ "content": "<|reserved_special_token_182|>",
1525
+ "lstrip": false,
1526
+ "normalized": false,
1527
+ "rstrip": false,
1528
+ "single_word": false,
1529
+ "special": true
1530
+ },
1531
+ "128191": {
1532
+ "content": "<|reserved_special_token_183|>",
1533
+ "lstrip": false,
1534
+ "normalized": false,
1535
+ "rstrip": false,
1536
+ "single_word": false,
1537
+ "special": true
1538
+ },
1539
+ "128192": {
1540
+ "content": "<|reserved_special_token_184|>",
1541
+ "lstrip": false,
1542
+ "normalized": false,
1543
+ "rstrip": false,
1544
+ "single_word": false,
1545
+ "special": true
1546
+ },
1547
+ "128193": {
1548
+ "content": "<|reserved_special_token_185|>",
1549
+ "lstrip": false,
1550
+ "normalized": false,
1551
+ "rstrip": false,
1552
+ "single_word": false,
1553
+ "special": true
1554
+ },
1555
+ "128194": {
1556
+ "content": "<|reserved_special_token_186|>",
1557
+ "lstrip": false,
1558
+ "normalized": false,
1559
+ "rstrip": false,
1560
+ "single_word": false,
1561
+ "special": true
1562
+ },
1563
+ "128195": {
1564
+ "content": "<|reserved_special_token_187|>",
1565
+ "lstrip": false,
1566
+ "normalized": false,
1567
+ "rstrip": false,
1568
+ "single_word": false,
1569
+ "special": true
1570
+ },
1571
+ "128196": {
1572
+ "content": "<|reserved_special_token_188|>",
1573
+ "lstrip": false,
1574
+ "normalized": false,
1575
+ "rstrip": false,
1576
+ "single_word": false,
1577
+ "special": true
1578
+ },
1579
+ "128197": {
1580
+ "content": "<|reserved_special_token_189|>",
1581
+ "lstrip": false,
1582
+ "normalized": false,
1583
+ "rstrip": false,
1584
+ "single_word": false,
1585
+ "special": true
1586
+ },
1587
+ "128198": {
1588
+ "content": "<|reserved_special_token_190|>",
1589
+ "lstrip": false,
1590
+ "normalized": false,
1591
+ "rstrip": false,
1592
+ "single_word": false,
1593
+ "special": true
1594
+ },
1595
+ "128199": {
1596
+ "content": "<|reserved_special_token_191|>",
1597
+ "lstrip": false,
1598
+ "normalized": false,
1599
+ "rstrip": false,
1600
+ "single_word": false,
1601
+ "special": true
1602
+ },
1603
+ "128200": {
1604
+ "content": "<|reserved_special_token_192|>",
1605
+ "lstrip": false,
1606
+ "normalized": false,
1607
+ "rstrip": false,
1608
+ "single_word": false,
1609
+ "special": true
1610
+ },
1611
+ "128201": {
1612
+ "content": "<|reserved_special_token_193|>",
1613
+ "lstrip": false,
1614
+ "normalized": false,
1615
+ "rstrip": false,
1616
+ "single_word": false,
1617
+ "special": true
1618
+ },
1619
+ "128202": {
1620
+ "content": "<|reserved_special_token_194|>",
1621
+ "lstrip": false,
1622
+ "normalized": false,
1623
+ "rstrip": false,
1624
+ "single_word": false,
1625
+ "special": true
1626
+ },
1627
+ "128203": {
1628
+ "content": "<|reserved_special_token_195|>",
1629
+ "lstrip": false,
1630
+ "normalized": false,
1631
+ "rstrip": false,
1632
+ "single_word": false,
1633
+ "special": true
1634
+ },
1635
+ "128204": {
1636
+ "content": "<|reserved_special_token_196|>",
1637
+ "lstrip": false,
1638
+ "normalized": false,
1639
+ "rstrip": false,
1640
+ "single_word": false,
1641
+ "special": true
1642
+ },
1643
+ "128205": {
1644
+ "content": "<|reserved_special_token_197|>",
1645
+ "lstrip": false,
1646
+ "normalized": false,
1647
+ "rstrip": false,
1648
+ "single_word": false,
1649
+ "special": true
1650
+ },
1651
+ "128206": {
1652
+ "content": "<|reserved_special_token_198|>",
1653
+ "lstrip": false,
1654
+ "normalized": false,
1655
+ "rstrip": false,
1656
+ "single_word": false,
1657
+ "special": true
1658
+ },
1659
+ "128207": {
1660
+ "content": "<|reserved_special_token_199|>",
1661
+ "lstrip": false,
1662
+ "normalized": false,
1663
+ "rstrip": false,
1664
+ "single_word": false,
1665
+ "special": true
1666
+ },
1667
+ "128208": {
1668
+ "content": "<|reserved_special_token_200|>",
1669
+ "lstrip": false,
1670
+ "normalized": false,
1671
+ "rstrip": false,
1672
+ "single_word": false,
1673
+ "special": true
1674
+ },
1675
+ "128209": {
1676
+ "content": "<|reserved_special_token_201|>",
1677
+ "lstrip": false,
1678
+ "normalized": false,
1679
+ "rstrip": false,
1680
+ "single_word": false,
1681
+ "special": true
1682
+ },
1683
+ "128210": {
1684
+ "content": "<|reserved_special_token_202|>",
1685
+ "lstrip": false,
1686
+ "normalized": false,
1687
+ "rstrip": false,
1688
+ "single_word": false,
1689
+ "special": true
1690
+ },
1691
+ "128211": {
1692
+ "content": "<|reserved_special_token_203|>",
1693
+ "lstrip": false,
1694
+ "normalized": false,
1695
+ "rstrip": false,
1696
+ "single_word": false,
1697
+ "special": true
1698
+ },
1699
+ "128212": {
1700
+ "content": "<|reserved_special_token_204|>",
1701
+ "lstrip": false,
1702
+ "normalized": false,
1703
+ "rstrip": false,
1704
+ "single_word": false,
1705
+ "special": true
1706
+ },
1707
+ "128213": {
1708
+ "content": "<|reserved_special_token_205|>",
1709
+ "lstrip": false,
1710
+ "normalized": false,
1711
+ "rstrip": false,
1712
+ "single_word": false,
1713
+ "special": true
1714
+ },
1715
+ "128214": {
1716
+ "content": "<|reserved_special_token_206|>",
1717
+ "lstrip": false,
1718
+ "normalized": false,
1719
+ "rstrip": false,
1720
+ "single_word": false,
1721
+ "special": true
1722
+ },
1723
+ "128215": {
1724
+ "content": "<|reserved_special_token_207|>",
1725
+ "lstrip": false,
1726
+ "normalized": false,
1727
+ "rstrip": false,
1728
+ "single_word": false,
1729
+ "special": true
1730
+ },
1731
+ "128216": {
1732
+ "content": "<|reserved_special_token_208|>",
1733
+ "lstrip": false,
1734
+ "normalized": false,
1735
+ "rstrip": false,
1736
+ "single_word": false,
1737
+ "special": true
1738
+ },
1739
+ "128217": {
1740
+ "content": "<|reserved_special_token_209|>",
1741
+ "lstrip": false,
1742
+ "normalized": false,
1743
+ "rstrip": false,
1744
+ "single_word": false,
1745
+ "special": true
1746
+ },
1747
+ "128218": {
1748
+ "content": "<|reserved_special_token_210|>",
1749
+ "lstrip": false,
1750
+ "normalized": false,
1751
+ "rstrip": false,
1752
+ "single_word": false,
1753
+ "special": true
1754
+ },
1755
+ "128219": {
1756
+ "content": "<|reserved_special_token_211|>",
1757
+ "lstrip": false,
1758
+ "normalized": false,
1759
+ "rstrip": false,
1760
+ "single_word": false,
1761
+ "special": true
1762
+ },
1763
+ "128220": {
1764
+ "content": "<|reserved_special_token_212|>",
1765
+ "lstrip": false,
1766
+ "normalized": false,
1767
+ "rstrip": false,
1768
+ "single_word": false,
1769
+ "special": true
1770
+ },
1771
+ "128221": {
1772
+ "content": "<|reserved_special_token_213|>",
1773
+ "lstrip": false,
1774
+ "normalized": false,
1775
+ "rstrip": false,
1776
+ "single_word": false,
1777
+ "special": true
1778
+ },
1779
+ "128222": {
1780
+ "content": "<|reserved_special_token_214|>",
1781
+ "lstrip": false,
1782
+ "normalized": false,
1783
+ "rstrip": false,
1784
+ "single_word": false,
1785
+ "special": true
1786
+ },
1787
+ "128223": {
1788
+ "content": "<|reserved_special_token_215|>",
1789
+ "lstrip": false,
1790
+ "normalized": false,
1791
+ "rstrip": false,
1792
+ "single_word": false,
1793
+ "special": true
1794
+ },
1795
+ "128224": {
1796
+ "content": "<|reserved_special_token_216|>",
1797
+ "lstrip": false,
1798
+ "normalized": false,
1799
+ "rstrip": false,
1800
+ "single_word": false,
1801
+ "special": true
1802
+ },
1803
+ "128225": {
1804
+ "content": "<|reserved_special_token_217|>",
1805
+ "lstrip": false,
1806
+ "normalized": false,
1807
+ "rstrip": false,
1808
+ "single_word": false,
1809
+ "special": true
1810
+ },
1811
+ "128226": {
1812
+ "content": "<|reserved_special_token_218|>",
1813
+ "lstrip": false,
1814
+ "normalized": false,
1815
+ "rstrip": false,
1816
+ "single_word": false,
1817
+ "special": true
1818
+ },
1819
+ "128227": {
1820
+ "content": "<|reserved_special_token_219|>",
1821
+ "lstrip": false,
1822
+ "normalized": false,
1823
+ "rstrip": false,
1824
+ "single_word": false,
1825
+ "special": true
1826
+ },
1827
+ "128228": {
1828
+ "content": "<|reserved_special_token_220|>",
1829
+ "lstrip": false,
1830
+ "normalized": false,
1831
+ "rstrip": false,
1832
+ "single_word": false,
1833
+ "special": true
1834
+ },
1835
+ "128229": {
1836
+ "content": "<|reserved_special_token_221|>",
1837
+ "lstrip": false,
1838
+ "normalized": false,
1839
+ "rstrip": false,
1840
+ "single_word": false,
1841
+ "special": true
1842
+ },
1843
+ "128230": {
1844
+ "content": "<|reserved_special_token_222|>",
1845
+ "lstrip": false,
1846
+ "normalized": false,
1847
+ "rstrip": false,
1848
+ "single_word": false,
1849
+ "special": true
1850
+ },
1851
+ "128231": {
1852
+ "content": "<|reserved_special_token_223|>",
1853
+ "lstrip": false,
1854
+ "normalized": false,
1855
+ "rstrip": false,
1856
+ "single_word": false,
1857
+ "special": true
1858
+ },
1859
+ "128232": {
1860
+ "content": "<|reserved_special_token_224|>",
1861
+ "lstrip": false,
1862
+ "normalized": false,
1863
+ "rstrip": false,
1864
+ "single_word": false,
1865
+ "special": true
1866
+ },
1867
+ "128233": {
1868
+ "content": "<|reserved_special_token_225|>",
1869
+ "lstrip": false,
1870
+ "normalized": false,
1871
+ "rstrip": false,
1872
+ "single_word": false,
1873
+ "special": true
1874
+ },
1875
+ "128234": {
1876
+ "content": "<|reserved_special_token_226|>",
1877
+ "lstrip": false,
1878
+ "normalized": false,
1879
+ "rstrip": false,
1880
+ "single_word": false,
1881
+ "special": true
1882
+ },
1883
+ "128235": {
1884
+ "content": "<|reserved_special_token_227|>",
1885
+ "lstrip": false,
1886
+ "normalized": false,
1887
+ "rstrip": false,
1888
+ "single_word": false,
1889
+ "special": true
1890
+ },
1891
+ "128236": {
1892
+ "content": "<|reserved_special_token_228|>",
1893
+ "lstrip": false,
1894
+ "normalized": false,
1895
+ "rstrip": false,
1896
+ "single_word": false,
1897
+ "special": true
1898
+ },
1899
+ "128237": {
1900
+ "content": "<|reserved_special_token_229|>",
1901
+ "lstrip": false,
1902
+ "normalized": false,
1903
+ "rstrip": false,
1904
+ "single_word": false,
1905
+ "special": true
1906
+ },
1907
+ "128238": {
1908
+ "content": "<|reserved_special_token_230|>",
1909
+ "lstrip": false,
1910
+ "normalized": false,
1911
+ "rstrip": false,
1912
+ "single_word": false,
1913
+ "special": true
1914
+ },
1915
+ "128239": {
1916
+ "content": "<|reserved_special_token_231|>",
1917
+ "lstrip": false,
1918
+ "normalized": false,
1919
+ "rstrip": false,
1920
+ "single_word": false,
1921
+ "special": true
1922
+ },
1923
+ "128240": {
1924
+ "content": "<|reserved_special_token_232|>",
1925
+ "lstrip": false,
1926
+ "normalized": false,
1927
+ "rstrip": false,
1928
+ "single_word": false,
1929
+ "special": true
1930
+ },
1931
+ "128241": {
1932
+ "content": "<|reserved_special_token_233|>",
1933
+ "lstrip": false,
1934
+ "normalized": false,
1935
+ "rstrip": false,
1936
+ "single_word": false,
1937
+ "special": true
1938
+ },
1939
+ "128242": {
1940
+ "content": "<|reserved_special_token_234|>",
1941
+ "lstrip": false,
1942
+ "normalized": false,
1943
+ "rstrip": false,
1944
+ "single_word": false,
1945
+ "special": true
1946
+ },
1947
+ "128243": {
1948
+ "content": "<|reserved_special_token_235|>",
1949
+ "lstrip": false,
1950
+ "normalized": false,
1951
+ "rstrip": false,
1952
+ "single_word": false,
1953
+ "special": true
1954
+ },
1955
+ "128244": {
1956
+ "content": "<|reserved_special_token_236|>",
1957
+ "lstrip": false,
1958
+ "normalized": false,
1959
+ "rstrip": false,
1960
+ "single_word": false,
1961
+ "special": true
1962
+ },
1963
+ "128245": {
1964
+ "content": "<|reserved_special_token_237|>",
1965
+ "lstrip": false,
1966
+ "normalized": false,
1967
+ "rstrip": false,
1968
+ "single_word": false,
1969
+ "special": true
1970
+ },
1971
+ "128246": {
1972
+ "content": "<|reserved_special_token_238|>",
1973
+ "lstrip": false,
1974
+ "normalized": false,
1975
+ "rstrip": false,
1976
+ "single_word": false,
1977
+ "special": true
1978
+ },
1979
+ "128247": {
1980
+ "content": "<|reserved_special_token_239|>",
1981
+ "lstrip": false,
1982
+ "normalized": false,
1983
+ "rstrip": false,
1984
+ "single_word": false,
1985
+ "special": true
1986
+ },
1987
+ "128248": {
1988
+ "content": "<|reserved_special_token_240|>",
1989
+ "lstrip": false,
1990
+ "normalized": false,
1991
+ "rstrip": false,
1992
+ "single_word": false,
1993
+ "special": true
1994
+ },
1995
+ "128249": {
1996
+ "content": "<|reserved_special_token_241|>",
1997
+ "lstrip": false,
1998
+ "normalized": false,
1999
+ "rstrip": false,
2000
+ "single_word": false,
2001
+ "special": true
2002
+ },
2003
+ "128250": {
2004
+ "content": "<|reserved_special_token_242|>",
2005
+ "lstrip": false,
2006
+ "normalized": false,
2007
+ "rstrip": false,
2008
+ "single_word": false,
2009
+ "special": true
2010
+ },
2011
+ "128251": {
2012
+ "content": "<|reserved_special_token_243|>",
2013
+ "lstrip": false,
2014
+ "normalized": false,
2015
+ "rstrip": false,
2016
+ "single_word": false,
2017
+ "special": true
2018
+ },
2019
+ "128252": {
2020
+ "content": "<|reserved_special_token_244|>",
2021
+ "lstrip": false,
2022
+ "normalized": false,
2023
+ "rstrip": false,
2024
+ "single_word": false,
2025
+ "special": true
2026
+ },
2027
+ "128253": {
2028
+ "content": "<|reserved_special_token_245|>",
2029
+ "lstrip": false,
2030
+ "normalized": false,
2031
+ "rstrip": false,
2032
+ "single_word": false,
2033
+ "special": true
2034
+ },
2035
+ "128254": {
2036
+ "content": "<|reserved_special_token_246|>",
2037
+ "lstrip": false,
2038
+ "normalized": false,
2039
+ "rstrip": false,
2040
+ "single_word": false,
2041
+ "special": true
2042
+ },
2043
+ "128255": {
2044
+ "content": "<|reserved_special_token_247|>",
2045
+ "lstrip": false,
2046
+ "normalized": false,
2047
+ "rstrip": false,
2048
+ "single_word": false,
2049
+ "special": true
2050
+ }
2051
+ },
2052
+ "additional_special_tokens": [
2053
+ "<|eot_id|>",
2054
+ "<|eom_id|>"
2055
+ ],
2056
+ "bos_token": "<|begin_of_text|>",
2057
+ "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- Extract system message #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content'] | trim %}\n {%- set messages = messages[1:] %}\n {{- system_message + \"\\n\" }}\n{%- else %}\n {%- set system_message = \"You are a helpful assistant that can use tools. You are developed by Salesforce xLAM team.\" %}\n {% set format_instruction %}You have access to a set of tools. When using tools, make calls in a single JSON array: \n\n[{\"name\": \"tool_call_name\", \"arguments\": {\"arg1\": \"value1\", \"arg2\": \"value2\"}}, ... (additional parallel tool calls as needed)]\n\nIf no tool is suitable, state that explicitly. If the user's input lacks required parameters, ask for clarification. Do not interpret or respond until tool results are returned. Once they are available, process them or make additional calls if needed. For tasks that don't require tools, such as casual conversation or general advice, respond directly in plain text. 
The available tools are:{% endset %}\n {{- system_message + \"\\n\" }}\n {%- if tools is not none %}\n {{- format_instruction + \"\\n\\n\" }}\n {%- endif %}\n{%- endif %}\n\n\n{%- if tools is not none %}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n{%- endif %}\n{{- \"<|eot_id|>\" }}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n {%- elif 'tool_calls' in message %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {%- if message['tool_calls'] %}\n {{- \"[\" }}\n {%- for tool_call_function in message.tool_calls %}\n {%- set tool_call = tool_call_function.function %}\n {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n {{- '\"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- \"}\" }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \"]\" }}\n {{- \"<|eot_id|>\" }}\n {%- elif message['content'] %}\n {{- message['content'] | trim + '<|eot_id|>' }}\n {%- else %}\n {{- \"[]\\n\" + '<|eot_id|>' }}\n {%- endif %}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|start_header_id|>\" + \"ipython\" + \"<|end_header_id|>\\n\\n\" }}\n {%- set content = message[\"content\"] %}\n {%- if content is mapping or (content is iterable and content is not string) %}\n {{- content | tojson }}\n {%- else %}\n {{- content }}\n {%- endif %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}",
2058
+ "clean_up_tokenization_spaces": true,
2059
+ "eos_token": "<|eot_id|>",
2060
+ "extra_special_tokens": {},
2061
+ "model_input_names": [
2062
+ "input_ids",
2063
+ "attention_mask"
2064
+ ],
2065
+ "model_max_length": 16384,
2066
+ "pad_token": "<|eot_id|>",
2067
+ "padding_side": "right",
2068
+ "split_special_tokens": false,
2069
+ "tokenizer_class": "PreTrainedTokenizerFast"
2070
+ }
checkpoint-1539/trainer_state.json ADDED
@@ -0,0 +1,1176 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "best_metric": 0.042311783879995346,
3
+ "best_model_checkpoint": "./xlam_lora_new_ete_over_size_3epoch_multi_full_eng/checkpoint-1539",
4
+ "epoch": 2.9910719708472517,
5
+ "eval_steps": 171,
6
+ "global_step": 1539,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.019435165502581234,
13
+ "grad_norm": 0.2028260976076126,
14
+ "learning_rate": 3.205128205128205e-06,
15
+ "loss": 0.0883,
16
+ "step": 10
17
+ },
18
+ {
19
+ "epoch": 0.03887033100516247,
20
+ "grad_norm": 0.16199472546577454,
21
+ "learning_rate": 6.41025641025641e-06,
22
+ "loss": 0.0862,
23
+ "step": 20
24
+ },
25
+ {
26
+ "epoch": 0.0583054965077437,
27
+ "grad_norm": 0.10014229267835617,
28
+ "learning_rate": 9.615384615384616e-06,
29
+ "loss": 0.0785,
30
+ "step": 30
31
+ },
32
+ {
33
+ "epoch": 0.07774066201032494,
34
+ "grad_norm": 0.07943801581859589,
35
+ "learning_rate": 1.282051282051282e-05,
36
+ "loss": 0.0772,
37
+ "step": 40
38
+ },
39
+ {
40
+ "epoch": 0.09717582751290617,
41
+ "grad_norm": 0.11369317024946213,
42
+ "learning_rate": 1.602564102564103e-05,
43
+ "loss": 0.0649,
44
+ "step": 50
45
+ },
46
+ {
47
+ "epoch": 0.1166109930154874,
48
+ "grad_norm": 0.10264527052640915,
49
+ "learning_rate": 1.923076923076923e-05,
50
+ "loss": 0.0686,
51
+ "step": 60
52
+ },
53
+ {
54
+ "epoch": 0.13604615851806864,
55
+ "grad_norm": 0.08583803474903107,
56
+ "learning_rate": 2.2435897435897437e-05,
57
+ "loss": 0.0657,
58
+ "step": 70
59
+ },
60
+ {
61
+ "epoch": 0.15548132402064987,
62
+ "grad_norm": 0.09326862543821335,
63
+ "learning_rate": 2.4999884878368972e-05,
64
+ "loss": 0.0605,
65
+ "step": 80
66
+ },
67
+ {
68
+ "epoch": 0.1749164895232311,
69
+ "grad_norm": 0.09622487425804138,
70
+ "learning_rate": 2.4995855843928913e-05,
71
+ "loss": 0.0617,
72
+ "step": 90
73
+ },
74
+ {
75
+ "epoch": 0.19435165502581234,
76
+ "grad_norm": 0.10855123400688171,
77
+ "learning_rate": 2.4986072848240374e-05,
78
+ "loss": 0.0548,
79
+ "step": 100
80
+ },
81
+ {
82
+ "epoch": 0.21378682052839357,
83
+ "grad_norm": 0.12510105967521667,
84
+ "learning_rate": 2.4970540396075083e-05,
85
+ "loss": 0.0545,
86
+ "step": 110
87
+ },
88
+ {
89
+ "epoch": 0.2332219860309748,
90
+ "grad_norm": 0.11628145724534988,
91
+ "learning_rate": 2.494926563965445e-05,
92
+ "loss": 0.0601,
93
+ "step": 120
94
+ },
95
+ {
96
+ "epoch": 0.252657151533556,
97
+ "grad_norm": 0.12050613015890121,
98
+ "learning_rate": 2.4922258375356232e-05,
99
+ "loss": 0.0585,
100
+ "step": 130
101
+ },
102
+ {
103
+ "epoch": 0.2720923170361373,
104
+ "grad_norm": 0.08949285745620728,
105
+ "learning_rate": 2.488953103920354e-05,
106
+ "loss": 0.0477,
107
+ "step": 140
108
+ },
109
+ {
110
+ "epoch": 0.2915274825387185,
111
+ "grad_norm": 0.14706853032112122,
112
+ "learning_rate": 2.4851098701138466e-05,
113
+ "loss": 0.0555,
114
+ "step": 150
115
+ },
116
+ {
117
+ "epoch": 0.31096264804129975,
118
+ "grad_norm": 0.13029147684574127,
119
+ "learning_rate": 2.4806979058082832e-05,
120
+ "loss": 0.0493,
121
+ "step": 160
122
+ },
123
+ {
124
+ "epoch": 0.330397813543881,
125
+ "grad_norm": 0.1321103274822235,
126
+ "learning_rate": 2.475719242578929e-05,
127
+ "loss": 0.0547,
128
+ "step": 170
129
+ },
130
+ {
131
+ "epoch": 0.3323413300941391,
132
+ "eval_loss": 0.054034121334552765,
133
+ "eval_runtime": 172.2024,
134
+ "eval_samples_per_second": 5.029,
135
+ "eval_steps_per_second": 5.029,
136
+ "step": 171
137
+ },
138
+ {
139
+ "epoch": 0.3498329790464622,
140
+ "grad_norm": 0.14103983342647552,
141
+ "learning_rate": 2.470176172948652e-05,
142
+ "loss": 0.0522,
143
+ "step": 180
144
+ },
145
+ {
146
+ "epoch": 0.36926814454904344,
147
+ "grad_norm": 0.14120739698410034,
148
+ "learning_rate": 2.4640712493322894e-05,
149
+ "loss": 0.0501,
150
+ "step": 190
151
+ },
152
+ {
153
+ "epoch": 0.3887033100516247,
154
+ "grad_norm": 0.1301490068435669,
155
+ "learning_rate": 2.4574072828613354e-05,
156
+ "loss": 0.0492,
157
+ "step": 200
158
+ },
159
+ {
160
+ "epoch": 0.4081384755542059,
161
+ "grad_norm": 0.1606156975030899,
162
+ "learning_rate": 2.450187342089502e-05,
163
+ "loss": 0.0488,
164
+ "step": 210
165
+ },
166
+ {
167
+ "epoch": 0.42757364105678713,
168
+ "grad_norm": 0.1623048037290573,
169
+ "learning_rate": 2.442414751579744e-05,
170
+ "loss": 0.0459,
171
+ "step": 220
172
+ },
173
+ {
174
+ "epoch": 0.44700880655936837,
175
+ "grad_norm": 0.1424696147441864,
176
+ "learning_rate": 2.434093090373396e-05,
177
+ "loss": 0.0441,
178
+ "step": 230
179
+ },
180
+ {
181
+ "epoch": 0.4664439720619496,
182
+ "grad_norm": 0.13980570435523987,
183
+ "learning_rate": 2.4252261903421375e-05,
184
+ "loss": 0.0483,
185
+ "step": 240
186
+ },
187
+ {
188
+ "epoch": 0.4858791375645308,
189
+ "grad_norm": 0.17693087458610535,
190
+ "learning_rate": 2.415818134423528e-05,
191
+ "loss": 0.0451,
192
+ "step": 250
193
+ },
194
+ {
195
+ "epoch": 0.505314303067112,
196
+ "grad_norm": 0.18210634589195251,
197
+ "learning_rate": 2.405873254740942e-05,
198
+ "loss": 0.0466,
199
+ "step": 260
200
+ },
201
+ {
202
+ "epoch": 0.5247494685696933,
203
+ "grad_norm": 0.1527709811925888,
204
+ "learning_rate": 2.395396130608756e-05,
205
+ "loss": 0.0488,
206
+ "step": 270
207
+ },
208
+ {
209
+ "epoch": 0.5441846340722746,
210
+ "grad_norm": 0.15795762836933136,
211
+ "learning_rate": 2.3843915864237143e-05,
212
+ "loss": 0.0448,
213
+ "step": 280
214
+ },
215
+ {
216
+ "epoch": 0.5636197995748558,
217
+ "grad_norm": 0.15495383739471436,
218
+ "learning_rate": 2.3728646894434413e-05,
219
+ "loss": 0.047,
220
+ "step": 290
221
+ },
222
+ {
223
+ "epoch": 0.583054965077437,
224
+ "grad_norm": 0.17630279064178467,
225
+ "learning_rate": 2.3608207474531236e-05,
226
+ "loss": 0.0429,
227
+ "step": 300
228
+ },
229
+ {
230
+ "epoch": 0.6024901305800182,
231
+ "grad_norm": 0.17523537576198578,
232
+ "learning_rate": 2.3482653063214356e-05,
233
+ "loss": 0.0473,
234
+ "step": 310
235
+ },
236
+ {
237
+ "epoch": 0.6219252960825995,
238
+ "grad_norm": 0.1995943933725357,
239
+ "learning_rate": 2.3352041474468373e-05,
240
+ "loss": 0.0392,
241
+ "step": 320
242
+ },
243
+ {
244
+ "epoch": 0.6413604615851807,
245
+ "grad_norm": 0.1683543622493744,
246
+ "learning_rate": 2.3216432850954155e-05,
247
+ "loss": 0.0438,
248
+ "step": 330
249
+ },
250
+ {
251
+ "epoch": 0.660795627087762,
252
+ "grad_norm": 0.1374560445547104,
253
+ "learning_rate": 2.307588963631497e-05,
254
+ "loss": 0.0388,
255
+ "step": 340
256
+ },
257
+ {
258
+ "epoch": 0.6646826601882782,
259
+ "eval_loss": 0.048522137105464935,
260
+ "eval_runtime": 174.8794,
261
+ "eval_samples_per_second": 4.952,
262
+ "eval_steps_per_second": 4.952,
263
+ "step": 342
264
+ },
265
+ {
266
+ "epoch": 0.6802307925903431,
267
+ "grad_norm": 0.166055366396904,
268
+ "learning_rate": 2.293047654642309e-05,
269
+ "loss": 0.0469,
270
+ "step": 350
271
+ },
272
+ {
273
+ "epoch": 0.6996659580929244,
274
+ "grad_norm": 0.14119000732898712,
275
+ "learning_rate": 2.2780260539580116e-05,
276
+ "loss": 0.0424,
277
+ "step": 360
278
+ },
279
+ {
280
+ "epoch": 0.7191011235955056,
281
+ "grad_norm": 0.23057888448238373,
282
+ "learning_rate": 2.2625310785684688e-05,
283
+ "loss": 0.0451,
284
+ "step": 370
285
+ },
286
+ {
287
+ "epoch": 0.7385362890980869,
288
+ "grad_norm": 0.1304253786802292,
289
+ "learning_rate": 2.2465698634381892e-05,
290
+ "loss": 0.0434,
291
+ "step": 380
292
+ },
293
+ {
294
+ "epoch": 0.7579714546006681,
295
+ "grad_norm": 0.17952455580234528,
296
+ "learning_rate": 2.2301497582208884e-05,
297
+ "loss": 0.0409,
298
+ "step": 390
299
+ },
300
+ {
301
+ "epoch": 0.7774066201032493,
302
+ "grad_norm": 0.15560920536518097,
303
+ "learning_rate": 2.2132783238751987e-05,
304
+ "loss": 0.0465,
305
+ "step": 400
306
+ },
307
+ {
308
+ "epoch": 0.7968417856058305,
309
+ "grad_norm": 0.12066332250833511,
310
+ "learning_rate": 2.1959633291830785e-05,
311
+ "loss": 0.0433,
312
+ "step": 410
313
+ },
314
+ {
315
+ "epoch": 0.8162769511084118,
316
+ "grad_norm": 0.15932129323482513,
317
+ "learning_rate": 2.1782127471725232e-05,
318
+ "loss": 0.0458,
319
+ "step": 420
320
+ },
321
+ {
322
+ "epoch": 0.835712116610993,
323
+ "grad_norm": 0.19025273621082306,
324
+ "learning_rate": 2.160034751446231e-05,
325
+ "loss": 0.0522,
326
+ "step": 430
327
+ },
328
+ {
329
+ "epoch": 0.8551472821135743,
330
+ "grad_norm": 0.15156866610050201,
331
+ "learning_rate": 2.1414377124179038e-05,
332
+ "loss": 0.0433,
333
+ "step": 440
334
+ },
335
+ {
336
+ "epoch": 0.8745824476161554,
337
+ "grad_norm": 0.1594618409872055,
338
+ "learning_rate": 2.1224301934579293e-05,
339
+ "loss": 0.0434,
340
+ "step": 450
341
+ },
342
+ {
343
+ "epoch": 0.8940176131187367,
344
+ "grad_norm": 0.16077858209609985,
345
+ "learning_rate": 2.1030209469502098e-05,
346
+ "loss": 0.0426,
347
+ "step": 460
348
+ },
349
+ {
350
+ "epoch": 0.9134527786213179,
351
+ "grad_norm": 0.15807421505451202,
352
+ "learning_rate": 2.0832189102619543e-05,
353
+ "loss": 0.0461,
354
+ "step": 470
355
+ },
356
+ {
357
+ "epoch": 0.9328879441238992,
358
+ "grad_norm": 0.20585452020168304,
359
+ "learning_rate": 2.0630332016282927e-05,
360
+ "loss": 0.0429,
361
+ "step": 480
362
+ },
363
+ {
364
+ "epoch": 0.9523231096264804,
365
+ "grad_norm": 0.1859339326620102,
366
+ "learning_rate": 2.0424731159536083e-05,
367
+ "loss": 0.0432,
368
+ "step": 490
369
+ },
370
+ {
371
+ "epoch": 0.9717582751290617,
372
+ "grad_norm": 0.20674780011177063,
373
+ "learning_rate": 2.021548120531516e-05,
374
+ "loss": 0.0392,
375
+ "step": 500
376
+ },
377
+ {
378
+ "epoch": 0.9911934406316428,
379
+ "grad_norm": 0.22314420342445374,
380
+ "learning_rate": 2.0002678506854606e-05,
381
+ "loss": 0.041,
382
+ "step": 510
383
+ },
384
+ {
385
+ "epoch": 0.9970239902824173,
386
+ "eval_loss": 0.04610577970743179,
387
+ "eval_runtime": 175.1913,
388
+ "eval_samples_per_second": 4.943,
389
+ "eval_steps_per_second": 4.943,
390
+ "step": 513
391
+ },
392
+ {
393
+ "epoch": 1.010628606134224,
394
+ "grad_norm": 0.1707136183977127,
395
+ "learning_rate": 1.9786421053319455e-05,
396
+ "loss": 0.0468,
397
+ "step": 520
398
+ },
399
+ {
400
+ "epoch": 1.0300637716368053,
401
+ "grad_norm": 0.18121393024921417,
402
+ "learning_rate": 1.9566808424684284e-05,
403
+ "loss": 0.0361,
404
+ "step": 530
405
+ },
406
+ {
407
+ "epoch": 1.0494989371393866,
408
+ "grad_norm": 0.17815589904785156,
409
+ "learning_rate": 1.9343941745879695e-05,
410
+ "loss": 0.0418,
411
+ "step": 540
412
+ },
413
+ {
414
+ "epoch": 1.0689341026419679,
415
+ "grad_norm": 0.17634443938732147,
416
+ "learning_rate": 1.9117923640227314e-05,
417
+ "loss": 0.0437,
418
+ "step": 550
419
+ },
420
+ {
421
+ "epoch": 1.088369268144549,
422
+ "grad_norm": 0.18102432787418365,
423
+ "learning_rate": 1.8888858182184925e-05,
424
+ "loss": 0.0426,
425
+ "step": 560
426
+ },
427
+ {
428
+ "epoch": 1.1078044336471302,
429
+ "grad_norm": 0.20596696436405182,
430
+ "learning_rate": 1.865685084942331e-05,
431
+ "loss": 0.0414,
432
+ "step": 570
433
+ },
434
+ {
435
+ "epoch": 1.1272395991497115,
436
+ "grad_norm": 0.2145373523235321,
437
+ "learning_rate": 1.8422008474257013e-05,
438
+ "loss": 0.0354,
439
+ "step": 580
440
+ },
441
+ {
442
+ "epoch": 1.1466747646522928,
443
+ "grad_norm": 0.2385258674621582,
444
+ "learning_rate": 1.8184439194451326e-05,
445
+ "loss": 0.0357,
446
+ "step": 590
447
+ },
448
+ {
449
+ "epoch": 1.166109930154874,
450
+ "grad_norm": 0.23679479956626892,
451
+ "learning_rate": 1.7944252403428097e-05,
452
+ "loss": 0.041,
453
+ "step": 600
454
+ },
455
+ {
456
+ "epoch": 1.1855450956574551,
457
+ "grad_norm": 0.1947854906320572,
458
+ "learning_rate": 1.770155869989343e-05,
459
+ "loss": 0.0407,
460
+ "step": 610
461
+ },
462
+ {
463
+ "epoch": 1.2049802611600364,
464
+ "grad_norm": 0.25392264127731323,
465
+ "learning_rate": 1.7456469836910334e-05,
466
+ "loss": 0.0405,
467
+ "step": 620
468
+ },
469
+ {
470
+ "epoch": 1.2244154266626177,
471
+ "grad_norm": 0.2636931240558624,
472
+ "learning_rate": 1.7209098670439817e-05,
473
+ "loss": 0.0442,
474
+ "step": 630
475
+ },
476
+ {
477
+ "epoch": 1.243850592165199,
478
+ "grad_norm": 0.19380666315555573,
479
+ "learning_rate": 1.695955910737419e-05,
480
+ "loss": 0.0344,
481
+ "step": 640
482
+ },
483
+ {
484
+ "epoch": 1.26328575766778,
485
+ "grad_norm": 0.2423335164785385,
486
+ "learning_rate": 1.670796605308638e-05,
487
+ "loss": 0.0415,
488
+ "step": 650
489
+ },
490
+ {
491
+ "epoch": 1.2827209231703613,
492
+ "grad_norm": 0.20322546362876892,
493
+ "learning_rate": 1.6454435358519524e-05,
494
+ "loss": 0.0381,
495
+ "step": 660
496
+ },
497
+ {
498
+ "epoch": 1.3021560886729426,
499
+ "grad_norm": 0.20570382475852966,
500
+ "learning_rate": 1.6199083766841115e-05,
501
+ "loss": 0.035,
502
+ "step": 670
503
+ },
504
+ {
505
+ "epoch": 1.321591254175524,
506
+ "grad_norm": 0.23237545788288116,
507
+ "learning_rate": 1.594202885968636e-05,
508
+ "loss": 0.0447,
509
+ "step": 680
510
+ },
511
+ {
512
+ "epoch": 1.3293653203765563,
513
+ "eval_loss": 0.0446239709854126,
514
+ "eval_runtime": 174.4977,
515
+ "eval_samples_per_second": 4.963,
516
+ "eval_steps_per_second": 4.963,
517
+ "step": 684
518
+ },
519
+ {
520
+ "epoch": 1.341026419678105,
521
+ "grad_norm": 0.2273474782705307,
522
+ "learning_rate": 1.568338900301536e-05,
523
+ "loss": 0.0406,
524
+ "step": 690
525
+ },
526
+ {
527
+ "epoch": 1.3604615851806863,
528
+ "grad_norm": 0.2105632871389389,
529
+ "learning_rate": 1.5423283292609255e-05,
530
+ "loss": 0.0384,
531
+ "step": 700
532
+ },
533
+ {
534
+ "epoch": 1.3798967506832676,
535
+ "grad_norm": 0.728905200958252,
536
+ "learning_rate": 1.5161831499230197e-05,
537
+ "loss": 0.0385,
538
+ "step": 710
539
+ },
540
+ {
541
+ "epoch": 1.3993319161858488,
542
+ "grad_norm": 0.2697829604148865,
543
+ "learning_rate": 1.4899154013470574e-05,
544
+ "loss": 0.0407,
545
+ "step": 720
546
+ },
547
+ {
548
+ "epoch": 1.4187670816884301,
549
+ "grad_norm": 0.23418137431144714,
550
+ "learning_rate": 1.4635371790316805e-05,
551
+ "loss": 0.0425,
552
+ "step": 730
553
+ },
554
+ {
555
+ "epoch": 1.4382022471910112,
556
+ "grad_norm": 0.21163399517536163,
557
+ "learning_rate": 1.437060629345325e-05,
558
+ "loss": 0.0369,
559
+ "step": 740
560
+ },
561
+ {
562
+ "epoch": 1.4576374126935925,
563
+ "grad_norm": 0.24824394285678864,
564
+ "learning_rate": 1.4104979439331889e-05,
565
+ "loss": 0.0413,
566
+ "step": 750
567
+ },
568
+ {
569
+ "epoch": 1.4770725781961738,
570
+ "grad_norm": 0.24107202887535095,
571
+ "learning_rate": 1.383861354103349e-05,
572
+ "loss": 0.0368,
573
+ "step": 760
574
+ },
575
+ {
576
+ "epoch": 1.4965077436987548,
577
+ "grad_norm": 0.25261572003364563,
578
+ "learning_rate": 1.357163125194618e-05,
579
+ "loss": 0.0437,
580
+ "step": 770
581
+ },
582
+ {
583
+ "epoch": 1.5159429092013361,
584
+ "grad_norm": 0.1909024715423584,
585
+ "learning_rate": 1.3304155509287273e-05,
586
+ "loss": 0.0376,
587
+ "step": 780
588
+ },
589
+ {
590
+ "epoch": 1.5353780747039174,
591
+ "grad_norm": 0.24820125102996826,
592
+ "learning_rate": 1.3036309477494433e-05,
593
+ "loss": 0.038,
594
+ "step": 790
595
+ },
596
+ {
597
+ "epoch": 1.5548132402064985,
598
+ "grad_norm": 0.2151060849428177,
599
+ "learning_rate": 1.27682164915122e-05,
600
+ "loss": 0.0427,
601
+ "step": 800
602
+ },
603
+ {
604
+ "epoch": 1.57424840570908,
605
+ "grad_norm": 0.2735498249530792,
606
+ "learning_rate": 1.25e-05,
607
+ "loss": 0.0405,
608
+ "step": 810
609
+ },
610
+ {
611
+ "epoch": 1.593683571211661,
612
+ "grad_norm": 0.20167022943496704,
613
+ "learning_rate": 1.2231783508487806e-05,
614
+ "loss": 0.0361,
615
+ "step": 820
616
+ },
617
+ {
618
+ "epoch": 1.6131187367142423,
619
+ "grad_norm": 0.29892289638519287,
620
+ "learning_rate": 1.1963690522505568e-05,
621
+ "loss": 0.0343,
622
+ "step": 830
623
+ },
624
+ {
625
+ "epoch": 1.6325539022168236,
626
+ "grad_norm": 0.2181161344051361,
627
+ "learning_rate": 1.169584449071273e-05,
628
+ "loss": 0.044,
629
+ "step": 840
630
+ },
631
+ {
632
+ "epoch": 1.6519890677194047,
633
+ "grad_norm": 0.24029949307441711,
634
+ "learning_rate": 1.142836874805382e-05,
635
+ "loss": 0.0347,
636
+ "step": 850
637
+ },
638
+ {
639
+ "epoch": 1.6617066504706954,
640
+ "eval_loss": 0.04329855740070343,
641
+ "eval_runtime": 170.3718,
642
+ "eval_samples_per_second": 5.083,
643
+ "eval_steps_per_second": 5.083,
644
+ "step": 855
645
+ },
646
+ {
647
+ "epoch": 1.6714242332219862,
648
+ "grad_norm": 0.23985505104064941,
649
+ "learning_rate": 1.1161386458966511e-05,
650
+ "loss": 0.0401,
651
+ "step": 860
652
+ },
653
+ {
654
+ "epoch": 1.6908593987245673,
655
+ "grad_norm": 0.22673290967941284,
656
+ "learning_rate": 1.0895020560668112e-05,
657
+ "loss": 0.0364,
658
+ "step": 870
659
+ },
660
+ {
661
+ "epoch": 1.7102945642271485,
662
+ "grad_norm": 0.22750601172447205,
663
+ "learning_rate": 1.0629393706546752e-05,
664
+ "loss": 0.039,
665
+ "step": 880
666
+ },
667
+ {
668
+ "epoch": 1.7297297297297298,
669
+ "grad_norm": 0.24846433103084564,
670
+ "learning_rate": 1.03646282096832e-05,
671
+ "loss": 0.0341,
672
+ "step": 890
673
+ },
674
+ {
675
+ "epoch": 1.749164895232311,
676
+ "grad_norm": 0.19867610931396484,
677
+ "learning_rate": 1.0100845986529429e-05,
678
+ "loss": 0.036,
679
+ "step": 900
680
+ },
681
+ {
682
+ "epoch": 1.7686000607348922,
683
+ "grad_norm": 0.2108948975801468,
684
+ "learning_rate": 9.838168500769806e-06,
685
+ "loss": 0.0359,
686
+ "step": 910
687
+ },
688
+ {
689
+ "epoch": 1.7880352262374735,
690
+ "grad_norm": 0.22352129220962524,
691
+ "learning_rate": 9.576716707390745e-06,
692
+ "loss": 0.0432,
693
+ "step": 920
694
+ },
695
+ {
696
+ "epoch": 1.8074703917400545,
697
+ "grad_norm": 0.2634281516075134,
698
+ "learning_rate": 9.316610996984642e-06,
699
+ "loss": 0.0371,
700
+ "step": 930
701
+ },
702
+ {
703
+ "epoch": 1.826905557242636,
704
+ "grad_norm": 0.261662095785141,
705
+ "learning_rate": 9.057971140313645e-06,
706
+ "loss": 0.0414,
707
+ "step": 940
708
+ },
709
+ {
710
+ "epoch": 1.846340722745217,
711
+ "grad_norm": 0.269505113363266,
712
+ "learning_rate": 8.800916233158885e-06,
713
+ "loss": 0.033,
714
+ "step": 950
715
+ },
716
+ {
717
+ "epoch": 1.8657758882477984,
718
+ "grad_norm": 0.2583780288696289,
719
+ "learning_rate": 8.545564641480484e-06,
720
+ "loss": 0.04,
721
+ "step": 960
722
+ },
723
+ {
724
+ "epoch": 1.8852110537503797,
725
+ "grad_norm": 0.2607627213001251,
726
+ "learning_rate": 8.292033946913624e-06,
727
+ "loss": 0.0354,
728
+ "step": 970
729
+ },
730
+ {
731
+ "epoch": 1.9046462192529607,
732
+ "grad_norm": 0.23419621586799622,
733
+ "learning_rate": 8.040440892625817e-06,
734
+ "loss": 0.0362,
735
+ "step": 980
736
+ },
737
+ {
738
+ "epoch": 1.924081384755542,
739
+ "grad_norm": 0.2785259783267975,
740
+ "learning_rate": 7.790901329560186e-06,
741
+ "loss": 0.0367,
742
+ "step": 990
743
+ },
744
+ {
745
+ "epoch": 1.9435165502581233,
746
+ "grad_norm": 0.2678421437740326,
747
+ "learning_rate": 7.543530163089671e-06,
748
+ "loss": 0.0363,
749
+ "step": 1000
750
+ },
751
+ {
752
+ "epoch": 1.9629517157607044,
753
+ "grad_norm": 0.2274320125579834,
754
+ "learning_rate": 7.29844130010657e-06,
755
+ "loss": 0.0395,
756
+ "step": 1010
757
+ },
758
+ {
759
+ "epoch": 1.9823868812632859,
760
+ "grad_norm": 0.2792135179042816,
761
+ "learning_rate": 7.055747596571904e-06,
762
+ "loss": 0.0374,
763
+ "step": 1020
764
+ },
765
+ {
766
+ "epoch": 1.9940479805648343,
767
+ "eval_loss": 0.04268278926610947,
768
+ "eval_runtime": 169.855,
769
+ "eval_samples_per_second": 5.098,
770
+ "eval_steps_per_second": 5.098,
771
+ "step": 1026
772
+ },
773
+ {
774
+ "epoch": 2.001822046765867,
775
+ "grad_norm": 0.2434389591217041,
776
+ "learning_rate": 6.815560805548682e-06,
777
+ "loss": 0.0359,
778
+ "step": 1030
779
+ },
780
+ {
781
+ "epoch": 2.021257212268448,
782
+ "grad_norm": 0.2474217265844345,
783
+ "learning_rate": 6.577991525742991e-06,
784
+ "loss": 0.0301,
785
+ "step": 1040
786
+ },
787
+ {
788
+ "epoch": 2.0406923777710295,
789
+ "grad_norm": 0.21972987055778503,
790
+ "learning_rate": 6.3431491505766965e-06,
791
+ "loss": 0.031,
792
+ "step": 1050
793
+ },
794
+ {
795
+ "epoch": 2.0601275432736106,
796
+ "grad_norm": 0.26206907629966736,
797
+ "learning_rate": 6.111141817815079e-06,
798
+ "loss": 0.0367,
799
+ "step": 1060
800
+ },
801
+ {
802
+ "epoch": 2.079562708776192,
803
+ "grad_norm": 0.26828330755233765,
804
+ "learning_rate": 5.882076359772686e-06,
805
+ "loss": 0.0358,
806
+ "step": 1070
807
+ },
808
+ {
809
+ "epoch": 2.098997874278773,
810
+ "grad_norm": 0.3341256380081177,
811
+ "learning_rate": 5.656058254120305e-06,
812
+ "loss": 0.0362,
813
+ "step": 1080
814
+ },
815
+ {
816
+ "epoch": 2.118433039781354,
817
+ "grad_norm": 0.24040237069129944,
818
+ "learning_rate": 5.4331915753157175e-06,
819
+ "loss": 0.0336,
820
+ "step": 1090
821
+ },
822
+ {
823
+ "epoch": 2.1378682052839357,
824
+ "grad_norm": 0.2951533794403076,
825
+ "learning_rate": 5.213578946680547e-06,
826
+ "loss": 0.0374,
827
+ "step": 1100
828
+ },
829
+ {
830
+ "epoch": 2.157303370786517,
831
+ "grad_norm": 0.3339778184890747,
832
+ "learning_rate": 4.997321493145399e-06,
833
+ "loss": 0.0336,
834
+ "step": 1110
835
+ },
836
+ {
837
+ "epoch": 2.176738536289098,
838
+ "grad_norm": 0.28566041588783264,
839
+ "learning_rate": 4.784518794684843e-06,
840
+ "loss": 0.0305,
841
+ "step": 1120
842
+ },
843
+ {
844
+ "epoch": 2.1961737017916794,
845
+ "grad_norm": 0.23707829415798187,
846
+ "learning_rate": 4.575268840463919e-06,
847
+ "loss": 0.0347,
848
+ "step": 1130
849
+ },
850
+ {
851
+ "epoch": 2.2156088672942604,
852
+ "grad_norm": 0.268509179353714,
853
+ "learning_rate": 4.3696679837170755e-06,
854
+ "loss": 0.0338,
855
+ "step": 1140
856
+ },
857
+ {
858
+ "epoch": 2.235044032796842,
859
+ "grad_norm": 0.2489359974861145,
860
+ "learning_rate": 4.16781089738046e-06,
861
+ "loss": 0.0346,
862
+ "step": 1150
863
+ },
864
+ {
865
+ "epoch": 2.254479198299423,
866
+ "grad_norm": 0.2973306179046631,
867
+ "learning_rate": 3.969790530497904e-06,
868
+ "loss": 0.0341,
869
+ "step": 1160
870
+ },
871
+ {
872
+ "epoch": 2.273914363802004,
873
+ "grad_norm": 0.2385181337594986,
874
+ "learning_rate": 3.7756980654207032e-06,
875
+ "loss": 0.0307,
876
+ "step": 1170
877
+ },
878
+ {
879
+ "epoch": 2.2933495293045856,
880
+ "grad_norm": 0.24831977486610413,
881
+ "learning_rate": 3.585622875820965e-06,
882
+ "loss": 0.0324,
883
+ "step": 1180
884
+ },
885
+ {
886
+ "epoch": 2.3127846948071666,
887
+ "grad_norm": 0.2952122092247009,
888
+ "learning_rate": 3.399652485537691e-06,
889
+ "loss": 0.0337,
890
+ "step": 1190
891
+ },
892
+ {
893
+ "epoch": 2.3263893106589735,
894
+ "eval_loss": 0.042657386511564255,
895
+ "eval_runtime": 168.8317,
896
+ "eval_samples_per_second": 5.129,
897
+ "eval_steps_per_second": 5.129,
898
+ "step": 1197
899
+ },
900
+ {
901
+ "epoch": 2.332219860309748,
902
+ "grad_norm": 0.29106399416923523,
903
+ "learning_rate": 3.2178725282747665e-06,
904
+ "loss": 0.0346,
905
+ "step": 1200
906
+ },
907
+ {
908
+ "epoch": 2.351655025812329,
909
+ "grad_norm": 0.30523982644081116,
910
+ "learning_rate": 3.040366708169222e-06,
911
+ "loss": 0.036,
912
+ "step": 1210
913
+ },
914
+ {
915
+ "epoch": 2.3710901913149103,
916
+ "grad_norm": 0.2523854672908783,
917
+ "learning_rate": 2.8672167612480163e-06,
918
+ "loss": 0.0298,
919
+ "step": 1220
920
+ },
921
+ {
922
+ "epoch": 2.390525356817492,
923
+ "grad_norm": 0.2531813383102417,
924
+ "learning_rate": 2.698502417791121e-06,
925
+ "loss": 0.0383,
926
+ "step": 1230
927
+ },
928
+ {
929
+ "epoch": 2.409960522320073,
930
+ "grad_norm": 0.2635211944580078,
931
+ "learning_rate": 2.534301365618107e-06,
932
+ "loss": 0.0357,
933
+ "step": 1240
934
+ },
935
+ {
936
+ "epoch": 2.4293956878226544,
937
+ "grad_norm": 0.26689672470092773,
938
+ "learning_rate": 2.374689214315311e-06,
939
+ "loss": 0.0333,
940
+ "step": 1250
941
+ },
942
+ {
943
+ "epoch": 2.4488308533252354,
944
+ "grad_norm": 0.29953232407569885,
945
+ "learning_rate": 2.2197394604198853e-06,
946
+ "loss": 0.0317,
947
+ "step": 1260
948
+ },
949
+ {
950
+ "epoch": 2.4682660188278165,
951
+ "grad_norm": 0.21628761291503906,
952
+ "learning_rate": 2.069523453576909e-06,
953
+ "loss": 0.0362,
954
+ "step": 1270
955
+ },
956
+ {
957
+ "epoch": 2.487701184330398,
958
+ "grad_norm": 0.2457425892353058,
959
+ "learning_rate": 1.924110363685033e-06,
960
+ "loss": 0.0353,
961
+ "step": 1280
962
+ },
963
+ {
964
+ "epoch": 2.507136349832979,
965
+ "grad_norm": 0.3192308247089386,
966
+ "learning_rate": 1.783567149045845e-06,
967
+ "loss": 0.036,
968
+ "step": 1290
969
+ },
970
+ {
971
+ "epoch": 2.52657151533556,
972
+ "grad_norm": 0.2815244793891907,
973
+ "learning_rate": 1.6479585255316287e-06,
974
+ "loss": 0.0329,
975
+ "step": 1300
976
+ },
977
+ {
978
+ "epoch": 2.5460066808381416,
979
+ "grad_norm": 0.2549531161785126,
980
+ "learning_rate": 1.5173469367856477e-06,
981
+ "loss": 0.0382,
982
+ "step": 1310
983
+ },
984
+ {
985
+ "epoch": 2.5654418463407227,
986
+ "grad_norm": 0.25880110263824463,
987
+ "learning_rate": 1.3917925254687686e-06,
988
+ "loss": 0.0344,
989
+ "step": 1320
990
+ },
991
+ {
992
+ "epoch": 2.5848770118433038,
993
+ "grad_norm": 0.22720050811767578,
994
+ "learning_rate": 1.271353105565591e-06,
995
+ "loss": 0.0353,
996
+ "step": 1330
997
+ },
998
+ {
999
+ "epoch": 2.6043121773458853,
1000
+ "grad_norm": 0.2991912364959717,
1001
+ "learning_rate": 1.1560841357628582e-06,
1002
+ "loss": 0.0369,
1003
+ "step": 1340
1004
+ },
1005
+ {
1006
+ "epoch": 2.6237473428484663,
1007
+ "grad_norm": 0.3291521668434143,
1008
+ "learning_rate": 1.046038693912442e-06,
1009
+ "loss": 0.0331,
1010
+ "step": 1350
1011
+ },
1012
+ {
1013
+ "epoch": 2.643182508351048,
1014
+ "grad_norm": 0.24359193444252014,
1015
+ "learning_rate": 9.412674525905815e-07,
1016
+ "loss": 0.0335,
1017
+ "step": 1360
1018
+ },
1019
+ {
1020
+ "epoch": 2.6587306407531126,
1021
+ "eval_loss": 0.042365700006484985,
1022
+ "eval_runtime": 168.4092,
1023
+ "eval_samples_per_second": 5.142,
1024
+ "eval_steps_per_second": 5.142,
1025
+ "step": 1368
1026
+ },
1027
+ {
1028
+ "epoch": 2.662617673853629,
1029
+ "grad_norm": 0.30196014046669006,
1030
+ "learning_rate": 8.4181865576472e-07,
1031
+ "loss": 0.0319,
1032
+ "step": 1370
1033
+ },
1034
+ {
1035
+ "epoch": 2.68205283935621,
1036
+ "grad_norm": 0.31683069467544556,
1037
+ "learning_rate": 7.477380965786263e-07,
1038
+ "loss": 0.0353,
1039
+ "step": 1380
1040
+ },
1041
+ {
1042
+ "epoch": 2.7014880048587915,
1043
+ "grad_norm": 0.24554206430912018,
1044
+ "learning_rate": 6.590690962660393e-07,
1045
+ "loss": 0.0349,
1046
+ "step": 1390
1047
+ },
1048
+ {
1049
+ "epoch": 2.7209231703613725,
1050
+ "grad_norm": 0.3739015460014343,
1051
+ "learning_rate": 5.758524842025631e-07,
1052
+ "loss": 0.0353,
1053
+ "step": 1400
1054
+ },
1055
+ {
1056
+ "epoch": 2.740358335863954,
1057
+ "grad_norm": 0.27808961272239685,
1058
+ "learning_rate": 4.98126579104978e-07,
1059
+ "loss": 0.0337,
1060
+ "step": 1410
1061
+ },
1062
+ {
1063
+ "epoch": 2.759793501366535,
1064
+ "grad_norm": 0.2409674972295761,
1065
+ "learning_rate": 4.259271713866475e-07,
1066
+ "loss": 0.0357,
1067
+ "step": 1420
1068
+ },
1069
+ {
1070
+ "epoch": 2.779228666869116,
1071
+ "grad_norm": 0.2730494737625122,
1072
+ "learning_rate": 3.5928750667710844e-07,
1073
+ "loss": 0.0339,
1074
+ "step": 1430
1075
+ },
1076
+ {
1077
+ "epoch": 2.7986638323716977,
1078
+ "grad_norm": 0.23926123976707458,
1079
+ "learning_rate": 2.982382705134817e-07,
1080
+ "loss": 0.0349,
1081
+ "step": 1440
1082
+ },
1083
+ {
1084
+ "epoch": 2.8180989978742788,
1085
+ "grad_norm": 0.21703185141086578,
1086
+ "learning_rate": 2.4280757421071414e-07,
1087
+ "loss": 0.0366,
1088
+ "step": 1450
1089
+ },
1090
+ {
1091
+ "epoch": 2.8375341633768603,
1092
+ "grad_norm": 0.2484457939863205,
1093
+ "learning_rate": 1.9302094191716856e-07,
1094
+ "loss": 0.0338,
1095
+ "step": 1460
1096
+ },
1097
+ {
1098
+ "epoch": 2.8569693288794413,
1099
+ "grad_norm": 0.30563613772392273,
1100
+ "learning_rate": 1.489012988615368e-07,
1101
+ "loss": 0.0346,
1102
+ "step": 1470
1103
+ },
1104
+ {
1105
+ "epoch": 2.8764044943820224,
1106
+ "grad_norm": 0.3028404414653778,
1107
+ "learning_rate": 1.104689607964643e-07,
1108
+ "loss": 0.0386,
1109
+ "step": 1480
1110
+ },
1111
+ {
1112
+ "epoch": 2.8958396598846035,
1113
+ "grad_norm": 0.3035370111465454,
1114
+ "learning_rate": 7.774162464377066e-08,
1115
+ "loss": 0.0375,
1116
+ "step": 1490
1117
+ },
1118
+ {
1119
+ "epoch": 2.915274825387185,
1120
+ "grad_norm": 0.25364699959754944,
1121
+ "learning_rate": 5.073436034554874e-08,
1122
+ "loss": 0.0379,
1123
+ "step": 1500
1124
+ },
1125
+ {
1126
+ "epoch": 2.934709990889766,
1127
+ "grad_norm": 0.2718573212623596,
1128
+ "learning_rate": 2.945960392492092e-08,
1129
+ "loss": 0.0324,
1130
+ "step": 1510
1131
+ },
1132
+ {
1133
+ "epoch": 2.9541451563923475,
1134
+ "grad_norm": 0.33609631657600403,
1135
+ "learning_rate": 1.3927151759626777e-08,
1136
+ "loss": 0.0329,
1137
+ "step": 1520
1138
+ },
1139
+ {
1140
+ "epoch": 2.9735803218949286,
1141
+ "grad_norm": 0.2513211965560913,
1142
+ "learning_rate": 4.144156071086979e-09,
1143
+ "loss": 0.0338,
1144
+ "step": 1530
1145
+ },
1146
+ {
1147
+ "epoch": 2.9910719708472517,
1148
+ "eval_loss": 0.042311783879995346,
1149
+ "eval_runtime": 168.3509,
1150
+ "eval_samples_per_second": 5.144,
1151
+ "eval_steps_per_second": 5.144,
1152
+ "step": 1539
1153
+ }
1154
+ ],
1155
+ "logging_steps": 10,
1156
+ "max_steps": 1542,
1157
+ "num_input_tokens_seen": 0,
1158
+ "num_train_epochs": 3,
1159
+ "save_steps": 171,
1160
+ "stateful_callbacks": {
1161
+ "TrainerControl": {
1162
+ "args": {
1163
+ "should_epoch_stop": false,
1164
+ "should_evaluate": false,
1165
+ "should_log": false,
1166
+ "should_save": true,
1167
+ "should_training_stop": false
1168
+ },
1169
+ "attributes": {}
1170
+ }
1171
+ },
1172
+ "total_flos": 1.1799786844255027e+18,
1173
+ "train_batch_size": 1,
1174
+ "trial_name": null,
1175
+ "trial_params": null
1176
+ }
checkpoint-1539/training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d1d135889b53a4fc2dcc2af42081d4ec6fb88ac6da63713e48b39ec06726249a
3
+ size 5624
checkpoint-1542/README.md ADDED
@@ -0,0 +1,209 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ base_model: Salesforce/Llama-xLAM-2-8b-fc-r
3
+ library_name: peft
4
+ pipeline_tag: text-generation
5
+ tags:
6
+ - base_model:adapter:Salesforce/Llama-xLAM-2-8b-fc-r
7
+ - lora
8
+ - sft
9
+ - transformers
10
+ - trl
11
+ ---
12
+
13
+ # Model Card for Model ID
14
+
15
+ <!-- Provide a quick summary of what the model is/does. -->
16
+
17
+
18
+
19
+ ## Model Details
20
+
21
+ ### Model Description
22
+
23
+ <!-- Provide a longer summary of what this model is. -->
24
+
25
+
26
+
27
+ - **Developed by:** [More Information Needed]
28
+ - **Funded by [optional]:** [More Information Needed]
29
+ - **Shared by [optional]:** [More Information Needed]
30
+ - **Model type:** [More Information Needed]
31
+ - **Language(s) (NLP):** [More Information Needed]
32
+ - **License:** [More Information Needed]
33
+ - **Finetuned from model [optional]:** [More Information Needed]
34
+
35
+ ### Model Sources [optional]
36
+
37
+ <!-- Provide the basic links for the model. -->
38
+
39
+ - **Repository:** [More Information Needed]
40
+ - **Paper [optional]:** [More Information Needed]
41
+ - **Demo [optional]:** [More Information Needed]
42
+
43
+ ## Uses
44
+
45
+ <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->
46
+
47
+ ### Direct Use
48
+
49
+ <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->
50
+
51
+ [More Information Needed]
52
+
53
+ ### Downstream Use [optional]
54
+
55
+ <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->
56
+
57
+ [More Information Needed]
58
+
59
+ ### Out-of-Scope Use
60
+
61
+ <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->
62
+
63
+ [More Information Needed]
64
+
65
+ ## Bias, Risks, and Limitations
66
+
67
+ <!-- This section is meant to convey both technical and sociotechnical limitations. -->
68
+
69
+ [More Information Needed]
70
+
71
+ ### Recommendations
72
+
73
+ <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
74
+
75
+ Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
76
+
77
+ ## How to Get Started with the Model
78
+
79
+ Use the code below to get started with the model.
80
+
81
+ [More Information Needed]
82
+
83
+ ## Training Details
84
+
85
+ ### Training Data
86
+
87
+ <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->
88
+
89
+ [More Information Needed]
90
+
91
+ ### Training Procedure
92
+
93
+ <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->
94
+
95
+ #### Preprocessing [optional]
96
+
97
+ [More Information Needed]
98
+
99
+
100
+ #### Training Hyperparameters
101
+
102
+ - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->
103
+
104
+ #### Speeds, Sizes, Times [optional]
105
+
106
+ <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->
107
+
108
+ [More Information Needed]
109
+
110
+ ## Evaluation
111
+
112
+ <!-- This section describes the evaluation protocols and provides the results. -->
113
+
114
+ ### Testing Data, Factors & Metrics
115
+
116
+ #### Testing Data
117
+
118
+ <!-- This should link to a Dataset Card if possible. -->
119
+
120
+ [More Information Needed]
121
+
122
+ #### Factors
123
+
124
+ <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->
125
+
126
+ [More Information Needed]
127
+
128
+ #### Metrics
129
+
130
+ <!-- These are the evaluation metrics being used, ideally with a description of why. -->
131
+
132
+ [More Information Needed]
133
+
134
+ ### Results
135
+
136
+ [More Information Needed]
137
+
138
+ #### Summary
139
+
140
+
141
+
142
+ ## Model Examination [optional]
143
+
144
+ <!-- Relevant interpretability work for the model goes here -->
145
+
146
+ [More Information Needed]
147
+
148
+ ## Environmental Impact
149
+
150
+ <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->
151
+
152
+ Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
153
+
154
+ - **Hardware Type:** [More Information Needed]
155
+ - **Hours used:** [More Information Needed]
156
+ - **Cloud Provider:** [More Information Needed]
157
+ - **Compute Region:** [More Information Needed]
158
+ - **Carbon Emitted:** [More Information Needed]
159
+
160
+ ## Technical Specifications [optional]
161
+
162
+ ### Model Architecture and Objective
163
+
164
+ [More Information Needed]
165
+
166
+ ### Compute Infrastructure
167
+
168
+ [More Information Needed]
169
+
170
+ #### Hardware
171
+
172
+ [More Information Needed]
173
+
174
+ #### Software
175
+
176
+ [More Information Needed]
177
+
178
+ ## Citation [optional]
179
+
180
+ <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->
181
+
182
+ **BibTeX:**
183
+
184
+ [More Information Needed]
185
+
186
+ **APA:**
187
+
188
+ [More Information Needed]
189
+
190
+ ## Glossary [optional]
191
+
192
+ <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->
193
+
194
+ [More Information Needed]
195
+
196
+ ## More Information [optional]
197
+
198
+ [More Information Needed]
199
+
200
+ ## Model Card Authors [optional]
201
+
202
+ [More Information Needed]
203
+
204
+ ## Model Card Contact
205
+
206
+ [More Information Needed]
207
+ ### Framework versions
208
+
209
+ - PEFT 0.17.1