Word2Li committed (verified)
Commit 7c02997 · 1 Parent(s): 6cd15d8

Upload model

README.md ADDED
@@ -0,0 +1,155 @@
+ ---
+ library_name: transformers
+ license: apache-2.0
+ base_model: mistralai/Mistral-7B-v0.3
+ language: en
+ datasets:
+ - Word2Li/MiddOptimized
+ tags:
+ - llama-factory
+ - full
+ pipeline_tag: text-generation
+ model-index:
+ - name: Mistral-7B-v0.3-Middo-Alpaca
+   results:
+   - task:
+       type: text-generation
+     dataset:
+       name: MMLU
+       type: MMLU
+     metrics:
+     - name: Weighted Avg.
+       type: Weighted Avg.
+       value: 38.73
+       verified: true
+   - task:
+       type: text-generation
+     dataset:
+       name: IFEval
+       type: IFEval
+     metrics:
+     - name: Avg.
+       type: Avg.
+       value: 44.01
+       verified: true
+   - task:
+       type: text-generation
+     dataset:
+       name: GSM8K
+       type: GSM8K
+     metrics:
+     - name: pass@1
+       type: pass@1
+       value: 34.80
+       verified: true
+   - task:
+       type: text-generation
+     dataset:
+       name: MATH
+       type: MATH
+     metrics:
+     - name: pass@1
+       type: pass@1
+       value: 6.64
+       verified: true
+   - task:
+       type: text-generation
+     dataset:
+       name: HumanEval
+       type: HumanEval
+     metrics:
+     - name: pass@1
+       type: pass@1
+       value: 26.22
+       verified: true
+   - task:
+       type: text-generation
+     dataset:
+       name: MBPP
+       type: MBPP
+     metrics:
+     - name: pass@1
+       type: pass@1
+       value: 31.40
+       verified: true
+   - task:
+       type: text-generation
+     dataset:
+       name: Hellaswag
+       type: Hellaswag
+     metrics:
+     - name: pass@1
+       type: pass@1
+       value: 44.86
+       verified: true
+   - task:
+       type: text-generation
+     dataset:
+       name: GPQA
+       type: GPQA
+     metrics:
+     - name: pass@1
+       type: pass@1
+       value: 11.11
+       verified: true
+ metrics:
+ - accuracy
+ ---
+
+ # Mistral-7B-v0.3-Middo-Alpaca
+
+ Paper: [Middo: Model-Informed Dynamic Data Optimization for Enhanced LLM Fine-Tuning via Closed-Loop Learning](https://arxiv.org/abs/2508.21589)
+
+ Code: https://github.com/Word2VecT/Middo
+
+ ## Model description
+
+ This model is a fine-tuned version of [mistralai/Mistral-7B-v0.3](https://huggingface.co/mistralai/Mistral-7B-v0.3) on the [MiddOptimized/mistral_alpaca](https://huggingface.co/datasets/Word2Li/MiddOptimized/viewer/default/mistral_alpaca) dataset.
+
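A minimal `transformers` loading sketch: the repo id is inferred from the model name on this card, and the Alpaca-style prompt is an assumption rather than a documented chat template.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Repo id inferred from the model name on this card (assumption).
model_id = "Word2Li/Mistral-7B-v0.3-Middo-Alpaca"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,  # matches torch_dtype in config.json
    device_map="auto",           # requires `accelerate`
)

# Alpaca-style instruction prompt; the exact fine-tuning template is an assumption.
prompt = (
    "Below is an instruction that describes a task. "
    "Write a response that appropriately completes the request.\n\n"
    "### Instruction:\nGive three tips for staying healthy.\n\n### Response:\n"
)
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
output_ids = model.generate(**inputs, max_new_tokens=256, do_sample=False)
print(tokenizer.decode(output_ids[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True))
```
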
+ ## Training and evaluation data
+
+ ### Training data
+
+ The training set is [Alpaca](https://huggingface.co/datasets/tatsu-lab/alpaca), dynamically optimized by Middo with [mistralai/Mistral-7B-v0.3](https://huggingface.co/mistralai/Mistral-7B-v0.3) in the loop.
+
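A minimal sketch for pulling the optimized training data with the `datasets` library; the `mistral_alpaca` split name follows the dataset-viewer link in the model description above and should be treated as an assumption.

```python
from datasets import load_dataset

# "mistral_alpaca" mirrors the viewer link in the model description
# (config "default", split "mistral_alpaca"); treat the name as an assumption.
train_ds = load_dataset("Word2Li/MiddOptimized", split="mistral_alpaca")
print(len(train_ds), train_ds[0])
```
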
+ ### Evaluation data
+
+ - General
+   - MMLU
+   - IFEval
+ - Math
+   - GSM8K
+   - MATH
+ - Code
+   - HumanEval
+   - MBPP
+ - Reasoning
+   - Hellaswag
+   - GPQA
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training (a `TrainingArguments` sketch follows the list):
+
+ - learning_rate: 1e-05
+ - train_batch_size: 4
+ - eval_batch_size: 8
+ - seed: 42
+ - distributed_type: multi-GPU
+ - num_devices: 8
+ - gradient_accumulation_steps: 8
+ - total_train_batch_size: 256
+ - total_eval_batch_size: 64
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: cosine
+ - lr_scheduler_warmup_ratio: 0.03
+ - num_epochs: 1.0
+
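As a reference point, the values above map roughly onto `transformers` `TrainingArguments` as follows. The run itself was launched through LLaMA-Factory, so this is an illustrative reconstruction, and `output_dir` is a placeholder.

```python
from transformers import TrainingArguments

# Reconstruction of the reported hyperparameters; illustrative only, since the
# actual run used LLaMA-Factory rather than a hand-written Trainer script.
args = TrainingArguments(
    output_dir="mistral-7b-v0.3-middo-alpaca",  # placeholder
    learning_rate=1e-5,
    per_device_train_batch_size=4,   # x 8 GPUs x 8 accumulation steps = 256 total
    per_device_eval_batch_size=8,    # x 8 GPUs = 64 total
    gradient_accumulation_steps=8,
    num_train_epochs=1.0,
    lr_scheduler_type="cosine",
    warmup_ratio=0.03,
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    seed=42,
    bf16=True,                       # config.json stores bfloat16 weights
)
```
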
+ ### Framework versions
+
+ - Transformers 4.45.2
+ - PyTorch 2.5.1+cu121
+ - Datasets 2.21.0
+ - Tokenizers 0.20.1
all_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "epoch": 1.0,
+   "total_flos": 1.387246770039292e+18,
+   "train_loss": 1.0000714727661066,
+   "train_runtime": 2851.6353,
+   "train_samples_per_second": 20.461,
+   "train_steps_per_second": 0.08
+ }
config.json ADDED
@@ -0,0 +1,29 @@
+ {
+   "_name_or_path": "/mnt/petrelfs/tangzinan/LLaMA-Factory/models/Mistral-7B-v0.3",
+   "architectures": [
+     "MistralForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 14336,
+   "max_position_embeddings": 32768,
+   "model_type": "mistral",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 8,
+   "pad_token_id": 770,
+   "rms_norm_eps": 1e-05,
+   "rope_theta": 1000000.0,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.45.2",
+   "unsloth_version": "2024.9",
+   "use_cache": false,
+   "vocab_size": 32768
+ }
generation_config.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "max_length": 32768,
+   "pad_token_id": 770,
+   "transformers_version": "4.45.2"
+ }
model-00001-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b9481b6ea50d5ca8ec517345454497a746e99176d875febc07cf6b9985c35974
+ size 4949453792
model-00002-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0a79e6255f4d51b625ae5e75a6c8fdb63212eb49f809402be247a219e9b2ac35
+ size 4999819336
model-00003-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ca3c7a2fdad22f2a0d72ddb944186c03ee1b795f8cb008dbd0456da44c49da36
+ size 4546807800
model.safetensors.index.json ADDED
@@ -0,0 +1,298 @@
1
+ {
2
+ "metadata": {
3
+ "total_size": 14496047104
4
+ },
5
+ "weight_map": {
6
+ "lm_head.weight": "model-00003-of-00003.safetensors",
7
+ "model.embed_tokens.weight": "model-00001-of-00003.safetensors",
8
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00003.safetensors",
9
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
10
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
11
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
12
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
13
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
14
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
15
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
16
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
17
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00003.safetensors",
18
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
19
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
20
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
21
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
22
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
23
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
24
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
25
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
26
+ "model.layers.10.input_layernorm.weight": "model-00002-of-00003.safetensors",
27
+ "model.layers.10.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
28
+ "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
29
+ "model.layers.10.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
30
+ "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
31
+ "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
32
+ "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
33
+ "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
34
+ "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
35
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00003.safetensors",
36
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
37
+ "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
38
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
39
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
40
+ "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
41
+ "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
42
+ "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
43
+ "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
44
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00003.safetensors",
45
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
46
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
47
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
48
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
49
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
50
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
51
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
52
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
53
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00003.safetensors",
54
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
55
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
56
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
57
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
58
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
59
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
60
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
61
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
62
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00003.safetensors",
63
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
64
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
65
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
66
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
67
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
68
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
69
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
70
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
71
+ "model.layers.15.input_layernorm.weight": "model-00002-of-00003.safetensors",
72
+ "model.layers.15.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
73
+ "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
74
+ "model.layers.15.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
75
+ "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
76
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
77
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
78
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
79
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
80
+ "model.layers.16.input_layernorm.weight": "model-00002-of-00003.safetensors",
81
+ "model.layers.16.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
82
+ "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
83
+ "model.layers.16.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
84
+ "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
85
+ "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
86
+ "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
87
+ "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
88
+ "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
89
+ "model.layers.17.input_layernorm.weight": "model-00002-of-00003.safetensors",
90
+ "model.layers.17.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
91
+ "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
92
+ "model.layers.17.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
93
+ "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
94
+ "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
95
+ "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
96
+ "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
97
+ "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
98
+ "model.layers.18.input_layernorm.weight": "model-00002-of-00003.safetensors",
99
+ "model.layers.18.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
100
+ "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
101
+ "model.layers.18.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
102
+ "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
103
+ "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
104
+ "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
105
+ "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
106
+ "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
107
+ "model.layers.19.input_layernorm.weight": "model-00002-of-00003.safetensors",
108
+ "model.layers.19.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
109
+ "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
110
+ "model.layers.19.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
111
+ "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
112
+ "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
113
+ "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
114
+ "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
115
+ "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
116
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00003.safetensors",
117
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
118
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
119
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
120
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
121
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
122
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
123
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
124
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
125
+ "model.layers.20.input_layernorm.weight": "model-00002-of-00003.safetensors",
126
+ "model.layers.20.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
127
+ "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
128
+ "model.layers.20.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
129
+ "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
130
+ "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
131
+ "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
132
+ "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
133
+ "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
134
+ "model.layers.21.input_layernorm.weight": "model-00002-of-00003.safetensors",
135
+ "model.layers.21.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
136
+ "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
137
+ "model.layers.21.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
138
+ "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
139
+ "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
140
+ "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
141
+ "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
142
+ "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
143
+ "model.layers.22.input_layernorm.weight": "model-00003-of-00003.safetensors",
144
+ "model.layers.22.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
145
+ "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
146
+ "model.layers.22.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
147
+ "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
148
+ "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
149
+ "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
150
+ "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
151
+ "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
152
+ "model.layers.23.input_layernorm.weight": "model-00003-of-00003.safetensors",
153
+ "model.layers.23.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
154
+ "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
155
+ "model.layers.23.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
156
+ "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
157
+ "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
158
+ "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
159
+ "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
160
+ "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
161
+ "model.layers.24.input_layernorm.weight": "model-00003-of-00003.safetensors",
162
+ "model.layers.24.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
163
+ "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
164
+ "model.layers.24.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
165
+ "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
166
+ "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
167
+ "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
168
+ "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
169
+ "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
170
+ "model.layers.25.input_layernorm.weight": "model-00003-of-00003.safetensors",
171
+ "model.layers.25.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
172
+ "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
173
+ "model.layers.25.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
174
+ "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
175
+ "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
176
+ "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
177
+ "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
178
+ "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
179
+ "model.layers.26.input_layernorm.weight": "model-00003-of-00003.safetensors",
180
+ "model.layers.26.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
181
+ "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
182
+ "model.layers.26.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
183
+ "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
184
+ "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
185
+ "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
186
+ "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
187
+ "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
188
+ "model.layers.27.input_layernorm.weight": "model-00003-of-00003.safetensors",
189
+ "model.layers.27.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
190
+ "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
191
+ "model.layers.27.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
192
+ "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
193
+ "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
194
+ "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
195
+ "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
196
+ "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
197
+ "model.layers.28.input_layernorm.weight": "model-00003-of-00003.safetensors",
198
+ "model.layers.28.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
199
+ "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
200
+ "model.layers.28.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
201
+ "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
202
+ "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
203
+ "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
204
+ "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
205
+ "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
206
+ "model.layers.29.input_layernorm.weight": "model-00003-of-00003.safetensors",
207
+ "model.layers.29.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
208
+ "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
209
+ "model.layers.29.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
210
+ "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
211
+ "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
212
+ "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
213
+ "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
214
+ "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
215
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00003.safetensors",
216
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
217
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
218
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
219
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
220
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
221
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
222
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
223
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
224
+ "model.layers.30.input_layernorm.weight": "model-00003-of-00003.safetensors",
225
+ "model.layers.30.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
226
+ "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
227
+ "model.layers.30.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
228
+ "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
229
+ "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
230
+ "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
231
+ "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
232
+ "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
233
+ "model.layers.31.input_layernorm.weight": "model-00003-of-00003.safetensors",
234
+ "model.layers.31.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
235
+ "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
236
+ "model.layers.31.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
237
+ "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
238
+ "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
239
+ "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
240
+ "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
241
+ "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
242
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00003.safetensors",
243
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
244
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
245
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
246
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
247
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
248
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
249
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
250
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
251
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00003.safetensors",
252
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
253
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
254
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
255
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
256
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
257
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
258
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
259
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
260
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00003.safetensors",
261
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
262
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
263
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
264
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
265
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
266
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
267
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
268
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
269
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00003.safetensors",
270
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
271
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
272
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
273
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
274
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
275
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
276
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
277
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
278
+ "model.layers.8.input_layernorm.weight": "model-00001-of-00003.safetensors",
279
+ "model.layers.8.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
280
+ "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
281
+ "model.layers.8.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
282
+ "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
283
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
284
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
285
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
286
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
287
+ "model.layers.9.input_layernorm.weight": "model-00001-of-00003.safetensors",
288
+ "model.layers.9.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
289
+ "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
290
+ "model.layers.9.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
291
+ "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
292
+ "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
293
+ "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
294
+ "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
295
+ "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
296
+ "model.norm.weight": "model-00003-of-00003.safetensors"
297
+ }
298
+ }
special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+ {
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "[control_768]",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:37f00374dea48658ee8f5d0f21895b9bc55cb0103939607c8185bfd1c6ca1f89
+ size 587404
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff
 
train_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "epoch": 1.0,
+   "total_flos": 1.387246770039292e+18,
+   "train_loss": 1.0000714727661066,
+   "train_runtime": 2851.6353,
+   "train_samples_per_second": 20.461,
+   "train_steps_per_second": 0.08
+ }
trainer_log.jsonl ADDED
@@ -0,0 +1,229 @@
1
+ {"current_steps": 1, "total_steps": 228, "loss": 1.2955, "lr": 1.4285714285714286e-06, "epoch": 0.0043859649122807015, "percentage": 0.44, "elapsed_time": "0:00:14", "remaining_time": "0:55:23"}
2
+ {"current_steps": 2, "total_steps": 228, "loss": 1.3459, "lr": 2.8571428571428573e-06, "epoch": 0.008771929824561403, "percentage": 0.88, "elapsed_time": "0:00:27", "remaining_time": "0:50:57"}
3
+ {"current_steps": 3, "total_steps": 228, "loss": 1.2305, "lr": 4.2857142857142855e-06, "epoch": 0.013157894736842105, "percentage": 1.32, "elapsed_time": "0:00:39", "remaining_time": "0:49:00"}
4
+ {"current_steps": 4, "total_steps": 228, "loss": 1.0947, "lr": 5.7142857142857145e-06, "epoch": 0.017543859649122806, "percentage": 1.75, "elapsed_time": "0:00:52", "remaining_time": "0:49:07"}
5
+ {"current_steps": 5, "total_steps": 228, "loss": 1.0768, "lr": 7.1428571428571436e-06, "epoch": 0.021929824561403508, "percentage": 2.19, "elapsed_time": "0:01:04", "remaining_time": "0:48:14"}
6
+ {"current_steps": 6, "total_steps": 228, "loss": 1.0448, "lr": 8.571428571428571e-06, "epoch": 0.02631578947368421, "percentage": 2.63, "elapsed_time": "0:01:17", "remaining_time": "0:47:47"}
7
+ {"current_steps": 7, "total_steps": 228, "loss": 1.0738, "lr": 1e-05, "epoch": 0.03070175438596491, "percentage": 3.07, "elapsed_time": "0:01:30", "remaining_time": "0:47:44"}
8
+ {"current_steps": 8, "total_steps": 228, "loss": 1.0353, "lr": 9.999494817970498e-06, "epoch": 0.03508771929824561, "percentage": 3.51, "elapsed_time": "0:01:42", "remaining_time": "0:47:04"}
9
+ {"current_steps": 9, "total_steps": 228, "loss": 0.9942, "lr": 9.997979373965542e-06, "epoch": 0.039473684210526314, "percentage": 3.95, "elapsed_time": "0:01:53", "remaining_time": "0:46:13"}
10
+ {"current_steps": 10, "total_steps": 228, "loss": 1.0467, "lr": 9.995453974215164e-06, "epoch": 0.043859649122807015, "percentage": 4.39, "elapsed_time": "0:02:06", "remaining_time": "0:46:06"}
11
+ {"current_steps": 11, "total_steps": 228, "loss": 0.9998, "lr": 9.991919129033994e-06, "epoch": 0.04824561403508772, "percentage": 4.82, "elapsed_time": "0:02:19", "remaining_time": "0:45:44"}
12
+ {"current_steps": 12, "total_steps": 228, "loss": 0.9997, "lr": 9.987375552718133e-06, "epoch": 0.05263157894736842, "percentage": 5.26, "elapsed_time": "0:02:31", "remaining_time": "0:45:31"}
13
+ {"current_steps": 13, "total_steps": 228, "loss": 1.0602, "lr": 9.981824163400827e-06, "epoch": 0.05701754385964912, "percentage": 5.7, "elapsed_time": "0:02:44", "remaining_time": "0:45:13"}
14
+ {"current_steps": 14, "total_steps": 228, "loss": 1.043, "lr": 9.975266082866923e-06, "epoch": 0.06140350877192982, "percentage": 6.14, "elapsed_time": "0:02:56", "remaining_time": "0:44:55"}
15
+ {"current_steps": 15, "total_steps": 228, "loss": 1.0331, "lr": 9.967702636326195e-06, "epoch": 0.06578947368421052, "percentage": 6.58, "elapsed_time": "0:03:08", "remaining_time": "0:44:37"}
16
+ {"current_steps": 16, "total_steps": 228, "loss": 1.0376, "lr": 9.959135352145552e-06, "epoch": 0.07017543859649122, "percentage": 7.02, "elapsed_time": "0:03:19", "remaining_time": "0:44:06"}
17
+ {"current_steps": 17, "total_steps": 228, "loss": 1.0244, "lr": 9.9495659615402e-06, "epoch": 0.07456140350877193, "percentage": 7.46, "elapsed_time": "0:03:31", "remaining_time": "0:43:43"}
18
+ {"current_steps": 18, "total_steps": 228, "loss": 0.9812, "lr": 9.938996398223802e-06, "epoch": 0.07894736842105263, "percentage": 7.89, "elapsed_time": "0:03:45", "remaining_time": "0:43:45"}
19
+ {"current_steps": 19, "total_steps": 228, "loss": 1.0288, "lr": 9.927428798017738e-06, "epoch": 0.08333333333333333, "percentage": 8.33, "elapsed_time": "0:03:57", "remaining_time": "0:43:37"}
20
+ {"current_steps": 20, "total_steps": 228, "loss": 1.0403, "lr": 9.91486549841951e-06, "epoch": 0.08771929824561403, "percentage": 8.77, "elapsed_time": "0:04:10", "remaining_time": "0:43:24"}
21
+ {"current_steps": 21, "total_steps": 228, "loss": 1.046, "lr": 9.901309038130392e-06, "epoch": 0.09210526315789473, "percentage": 9.21, "elapsed_time": "0:04:22", "remaining_time": "0:43:07"}
22
+ {"current_steps": 22, "total_steps": 228, "loss": 1.0609, "lr": 9.886762156542428e-06, "epoch": 0.09649122807017543, "percentage": 9.65, "elapsed_time": "0:04:33", "remaining_time": "0:42:44"}
23
+ {"current_steps": 23, "total_steps": 228, "loss": 1.03, "lr": 9.871227793184893e-06, "epoch": 0.10087719298245613, "percentage": 10.09, "elapsed_time": "0:04:45", "remaining_time": "0:42:26"}
24
+ {"current_steps": 24, "total_steps": 228, "loss": 1.0388, "lr": 9.854709087130261e-06, "epoch": 0.10526315789473684, "percentage": 10.53, "elapsed_time": "0:04:56", "remaining_time": "0:42:00"}
25
+ {"current_steps": 25, "total_steps": 228, "loss": 1.0588, "lr": 9.837209376359918e-06, "epoch": 0.10964912280701754, "percentage": 10.96, "elapsed_time": "0:05:08", "remaining_time": "0:41:47"}
26
+ {"current_steps": 26, "total_steps": 228, "loss": 1.0688, "lr": 9.81873219708962e-06, "epoch": 0.11403508771929824, "percentage": 11.4, "elapsed_time": "0:05:20", "remaining_time": "0:41:31"}
27
+ {"current_steps": 27, "total_steps": 228, "loss": 1.0311, "lr": 9.79928128305494e-06, "epoch": 0.11842105263157894, "percentage": 11.84, "elapsed_time": "0:05:32", "remaining_time": "0:41:18"}
28
+ {"current_steps": 28, "total_steps": 228, "loss": 1.0086, "lr": 9.778860564756769e-06, "epoch": 0.12280701754385964, "percentage": 12.28, "elapsed_time": "0:05:45", "remaining_time": "0:41:07"}
29
+ {"current_steps": 29, "total_steps": 228, "loss": 1.0627, "lr": 9.757474168667072e-06, "epoch": 0.12719298245614036, "percentage": 12.72, "elapsed_time": "0:05:57", "remaining_time": "0:40:55"}
30
+ {"current_steps": 30, "total_steps": 228, "loss": 1.0349, "lr": 9.73512641639504e-06, "epoch": 0.13157894736842105, "percentage": 13.16, "elapsed_time": "0:06:10", "remaining_time": "0:40:43"}
31
+ {"current_steps": 31, "total_steps": 228, "loss": 1.1047, "lr": 9.711821823813812e-06, "epoch": 0.13596491228070176, "percentage": 13.6, "elapsed_time": "0:06:22", "remaining_time": "0:40:28"}
32
+ {"current_steps": 32, "total_steps": 228, "loss": 1.0646, "lr": 9.68756510014794e-06, "epoch": 0.14035087719298245, "percentage": 14.04, "elapsed_time": "0:06:33", "remaining_time": "0:40:12"}
33
+ {"current_steps": 33, "total_steps": 228, "loss": 1.0211, "lr": 9.66236114702178e-06, "epoch": 0.14473684210526316, "percentage": 14.47, "elapsed_time": "0:06:46", "remaining_time": "0:40:02"}
34
+ {"current_steps": 34, "total_steps": 228, "loss": 1.03, "lr": 9.636215057469009e-06, "epoch": 0.14912280701754385, "percentage": 14.91, "elapsed_time": "0:06:58", "remaining_time": "0:39:49"}
35
+ {"current_steps": 35, "total_steps": 228, "loss": 1.0495, "lr": 9.609132114903458e-06, "epoch": 0.15350877192982457, "percentage": 15.35, "elapsed_time": "0:07:11", "remaining_time": "0:39:39"}
36
+ {"current_steps": 36, "total_steps": 228, "loss": 1.0653, "lr": 9.581117792051487e-06, "epoch": 0.15789473684210525, "percentage": 15.79, "elapsed_time": "0:07:23", "remaining_time": "0:39:27"}
37
+ {"current_steps": 37, "total_steps": 228, "loss": 1.0085, "lr": 9.552177749846083e-06, "epoch": 0.16228070175438597, "percentage": 16.23, "elapsed_time": "0:07:36", "remaining_time": "0:39:14"}
38
+ {"current_steps": 38, "total_steps": 228, "loss": 1.0548, "lr": 9.522317836282949e-06, "epoch": 0.16666666666666666, "percentage": 16.67, "elapsed_time": "0:07:48", "remaining_time": "0:39:02"}
39
+ {"current_steps": 39, "total_steps": 228, "loss": 1.0704, "lr": 9.491544085238778e-06, "epoch": 0.17105263157894737, "percentage": 17.11, "elapsed_time": "0:08:00", "remaining_time": "0:38:50"}
40
+ {"current_steps": 40, "total_steps": 228, "loss": 1.0463, "lr": 9.459862715251973e-06, "epoch": 0.17543859649122806, "percentage": 17.54, "elapsed_time": "0:08:14", "remaining_time": "0:38:45"}
41
+ {"current_steps": 41, "total_steps": 228, "loss": 1.0771, "lr": 9.427280128266049e-06, "epoch": 0.17982456140350878, "percentage": 17.98, "elapsed_time": "0:08:27", "remaining_time": "0:38:33"}
42
+ {"current_steps": 42, "total_steps": 228, "loss": 1.0615, "lr": 9.393802908335978e-06, "epoch": 0.18421052631578946, "percentage": 18.42, "elapsed_time": "0:08:39", "remaining_time": "0:38:20"}
43
+ {"current_steps": 43, "total_steps": 228, "loss": 1.0431, "lr": 9.359437820297716e-06, "epoch": 0.18859649122807018, "percentage": 18.86, "elapsed_time": "0:08:52", "remaining_time": "0:38:10"}
44
+ {"current_steps": 44, "total_steps": 228, "loss": 1.0271, "lr": 9.324191808401235e-06, "epoch": 0.19298245614035087, "percentage": 19.3, "elapsed_time": "0:09:03", "remaining_time": "0:37:54"}
45
+ {"current_steps": 45, "total_steps": 228, "loss": 1.0765, "lr": 9.288071994907262e-06, "epoch": 0.19736842105263158, "percentage": 19.74, "elapsed_time": "0:09:15", "remaining_time": "0:37:38"}
46
+ {"current_steps": 46, "total_steps": 228, "loss": 1.0526, "lr": 9.251085678648072e-06, "epoch": 0.20175438596491227, "percentage": 20.18, "elapsed_time": "0:09:28", "remaining_time": "0:37:27"}
47
+ {"current_steps": 47, "total_steps": 228, "loss": 1.0502, "lr": 9.213240333552589e-06, "epoch": 0.20614035087719298, "percentage": 20.61, "elapsed_time": "0:09:41", "remaining_time": "0:37:17"}
48
+ {"current_steps": 48, "total_steps": 228, "loss": 0.9878, "lr": 9.174543607136111e-06, "epoch": 0.21052631578947367, "percentage": 21.05, "elapsed_time": "0:09:54", "remaining_time": "0:37:07"}
49
+ {"current_steps": 49, "total_steps": 228, "loss": 0.9989, "lr": 9.135003318954954e-06, "epoch": 0.2149122807017544, "percentage": 21.49, "elapsed_time": "0:10:06", "remaining_time": "0:36:53"}
50
+ {"current_steps": 50, "total_steps": 228, "loss": 1.0816, "lr": 9.094627459026326e-06, "epoch": 0.21929824561403508, "percentage": 21.93, "elapsed_time": "0:10:17", "remaining_time": "0:36:38"}
51
+ {"current_steps": 51, "total_steps": 228, "loss": 1.0509, "lr": 9.053424186213776e-06, "epoch": 0.2236842105263158, "percentage": 22.37, "elapsed_time": "0:10:30", "remaining_time": "0:36:28"}
52
+ {"current_steps": 52, "total_steps": 228, "loss": 0.9849, "lr": 9.011401826578492e-06, "epoch": 0.22807017543859648, "percentage": 22.81, "elapsed_time": "0:10:44", "remaining_time": "0:36:22"}
53
+ {"current_steps": 53, "total_steps": 228, "loss": 0.9975, "lr": 8.968568871696847e-06, "epoch": 0.2324561403508772, "percentage": 23.25, "elapsed_time": "0:10:56", "remaining_time": "0:36:08"}
54
+ {"current_steps": 54, "total_steps": 228, "loss": 1.0747, "lr": 8.924933976944474e-06, "epoch": 0.23684210526315788, "percentage": 23.68, "elapsed_time": "0:11:10", "remaining_time": "0:35:59"}
55
+ {"current_steps": 55, "total_steps": 228, "loss": 1.028, "lr": 8.880505959747245e-06, "epoch": 0.2412280701754386, "percentage": 24.12, "elapsed_time": "0:11:22", "remaining_time": "0:35:47"}
56
+ {"current_steps": 56, "total_steps": 228, "loss": 0.9659, "lr": 8.835293797799517e-06, "epoch": 0.24561403508771928, "percentage": 24.56, "elapsed_time": "0:11:34", "remaining_time": "0:35:33"}
57
+ {"current_steps": 57, "total_steps": 228, "loss": 1.0228, "lr": 8.789306627249985e-06, "epoch": 0.25, "percentage": 25.0, "elapsed_time": "0:11:47", "remaining_time": "0:35:21"}
58
+ {"current_steps": 58, "total_steps": 228, "loss": 1.0338, "lr": 8.742553740855507e-06, "epoch": 0.2543859649122807, "percentage": 25.44, "elapsed_time": "0:11:59", "remaining_time": "0:35:07"}
59
+ {"current_steps": 59, "total_steps": 228, "loss": 1.0344, "lr": 8.695044586103297e-06, "epoch": 0.25877192982456143, "percentage": 25.88, "elapsed_time": "0:12:12", "remaining_time": "0:34:57"}
60
+ {"current_steps": 60, "total_steps": 228, "loss": 1.0423, "lr": 8.646788763301842e-06, "epoch": 0.2631578947368421, "percentage": 26.32, "elapsed_time": "0:12:24", "remaining_time": "0:34:45"}
61
+ {"current_steps": 61, "total_steps": 228, "loss": 1.0372, "lr": 8.59779602364094e-06, "epoch": 0.2675438596491228, "percentage": 26.75, "elapsed_time": "0:12:37", "remaining_time": "0:34:32"}
62
+ {"current_steps": 62, "total_steps": 228, "loss": 1.0935, "lr": 8.548076267221258e-06, "epoch": 0.2719298245614035, "percentage": 27.19, "elapsed_time": "0:12:48", "remaining_time": "0:34:18"}
63
+ {"current_steps": 63, "total_steps": 228, "loss": 0.9915, "lr": 8.497639541053769e-06, "epoch": 0.27631578947368424, "percentage": 27.63, "elapsed_time": "0:13:01", "remaining_time": "0:34:07"}
64
+ {"current_steps": 64, "total_steps": 228, "loss": 1.0252, "lr": 8.446496037029555e-06, "epoch": 0.2807017543859649, "percentage": 28.07, "elapsed_time": "0:13:17", "remaining_time": "0:34:02"}
65
+ {"current_steps": 65, "total_steps": 228, "loss": 0.9989, "lr": 8.394656089860274e-06, "epoch": 0.2850877192982456, "percentage": 28.51, "elapsed_time": "0:13:28", "remaining_time": "0:33:48"}
66
+ {"current_steps": 66, "total_steps": 228, "loss": 1.0872, "lr": 8.342130174989819e-06, "epoch": 0.2894736842105263, "percentage": 28.95, "elapsed_time": "0:13:40", "remaining_time": "0:33:33"}
67
+ {"current_steps": 67, "total_steps": 228, "loss": 1.0545, "lr": 8.288928906477497e-06, "epoch": 0.29385964912280704, "percentage": 29.39, "elapsed_time": "0:13:52", "remaining_time": "0:33:20"}
68
+ {"current_steps": 68, "total_steps": 228, "loss": 1.0128, "lr": 8.235063034853228e-06, "epoch": 0.2982456140350877, "percentage": 29.82, "elapsed_time": "0:14:05", "remaining_time": "0:33:08"}
69
+ {"current_steps": 69, "total_steps": 228, "loss": 1.0294, "lr": 8.180543444945154e-06, "epoch": 0.3026315789473684, "percentage": 30.26, "elapsed_time": "0:14:17", "remaining_time": "0:32:55"}
70
+ {"current_steps": 70, "total_steps": 228, "loss": 1.0445, "lr": 8.125381153680103e-06, "epoch": 0.30701754385964913, "percentage": 30.7, "elapsed_time": "0:14:29", "remaining_time": "0:32:41"}
71
+ {"current_steps": 71, "total_steps": 228, "loss": 1.0168, "lr": 8.069587307857377e-06, "epoch": 0.31140350877192985, "percentage": 31.14, "elapsed_time": "0:14:41", "remaining_time": "0:32:29"}
72
+ {"current_steps": 72, "total_steps": 228, "loss": 1.0207, "lr": 8.013173181896283e-06, "epoch": 0.3157894736842105, "percentage": 31.58, "elapsed_time": "0:14:54", "remaining_time": "0:32:18"}
73
+ {"current_steps": 73, "total_steps": 228, "loss": 1.0401, "lr": 7.95615017555788e-06, "epoch": 0.3201754385964912, "percentage": 32.02, "elapsed_time": "0:15:05", "remaining_time": "0:32:01"}
74
+ {"current_steps": 74, "total_steps": 228, "loss": 1.0347, "lr": 7.898529811641393e-06, "epoch": 0.32456140350877194, "percentage": 32.46, "elapsed_time": "0:15:17", "remaining_time": "0:31:49"}
75
+ {"current_steps": 75, "total_steps": 228, "loss": 1.061, "lr": 7.84032373365578e-06, "epoch": 0.32894736842105265, "percentage": 32.89, "elapsed_time": "0:15:31", "remaining_time": "0:31:40"}
76
+ {"current_steps": 76, "total_steps": 228, "loss": 0.988, "lr": 7.781543703466881e-06, "epoch": 0.3333333333333333, "percentage": 33.33, "elapsed_time": "0:15:44", "remaining_time": "0:31:28"}
77
+ {"current_steps": 77, "total_steps": 228, "loss": 1.0442, "lr": 7.722201598920673e-06, "epoch": 0.33771929824561403, "percentage": 33.77, "elapsed_time": "0:15:57", "remaining_time": "0:31:16"}
78
+ {"current_steps": 78, "total_steps": 228, "loss": 1.0412, "lr": 7.662309411443084e-06, "epoch": 0.34210526315789475, "percentage": 34.21, "elapsed_time": "0:16:09", "remaining_time": "0:31:04"}
79
+ {"current_steps": 79, "total_steps": 228, "loss": 1.0205, "lr": 7.601879243616838e-06, "epoch": 0.34649122807017546, "percentage": 34.65, "elapsed_time": "0:16:22", "remaining_time": "0:30:52"}
80
+ {"current_steps": 80, "total_steps": 228, "loss": 1.0298, "lr": 7.540923306735868e-06, "epoch": 0.3508771929824561, "percentage": 35.09, "elapsed_time": "0:16:34", "remaining_time": "0:30:40"}
81
+ {"current_steps": 81, "total_steps": 228, "loss": 1.0174, "lr": 7.479453918337733e-06, "epoch": 0.35526315789473684, "percentage": 35.53, "elapsed_time": "0:16:47", "remaining_time": "0:30:27"}
82
+ {"current_steps": 82, "total_steps": 228, "loss": 1.0281, "lr": 7.417483499714589e-06, "epoch": 0.35964912280701755, "percentage": 35.96, "elapsed_time": "0:16:59", "remaining_time": "0:30:14"}
83
+ {"current_steps": 83, "total_steps": 228, "loss": 1.0045, "lr": 7.355024573403174e-06, "epoch": 0.36403508771929827, "percentage": 36.4, "elapsed_time": "0:17:11", "remaining_time": "0:30:01"}
84
+ {"current_steps": 84, "total_steps": 228, "loss": 1.0253, "lr": 7.292089760654352e-06, "epoch": 0.3684210526315789, "percentage": 36.84, "elapsed_time": "0:17:23", "remaining_time": "0:29:48"}
85
+ {"current_steps": 85, "total_steps": 228, "loss": 1.0245, "lr": 7.2286917788826926e-06, "epoch": 0.37280701754385964, "percentage": 37.28, "elapsed_time": "0:17:35", "remaining_time": "0:29:35"}
86
+ {"current_steps": 86, "total_steps": 228, "loss": 1.0149, "lr": 7.1648434390966356e-06, "epoch": 0.37719298245614036, "percentage": 37.72, "elapsed_time": "0:17:47", "remaining_time": "0:29:22"}
87
+ {"current_steps": 87, "total_steps": 228, "loss": 1.0019, "lr": 7.100557643309732e-06, "epoch": 0.3815789473684211, "percentage": 38.16, "elapsed_time": "0:17:59", "remaining_time": "0:29:10"}
88
+ {"current_steps": 88, "total_steps": 228, "loss": 1.0147, "lr": 7.035847381933494e-06, "epoch": 0.38596491228070173, "percentage": 38.6, "elapsed_time": "0:18:11", "remaining_time": "0:28:56"}
89
+ {"current_steps": 89, "total_steps": 228, "loss": 0.9572, "lr": 6.970725731152389e-06, "epoch": 0.39035087719298245, "percentage": 39.04, "elapsed_time": "0:18:23", "remaining_time": "0:28:42"}
90
+ {"current_steps": 90, "total_steps": 228, "loss": 0.9651, "lr": 6.905205850281502e-06, "epoch": 0.39473684210526316, "percentage": 39.47, "elapsed_time": "0:18:35", "remaining_time": "0:28:30"}
91
+ {"current_steps": 91, "total_steps": 228, "loss": 1.0471, "lr": 6.8393009791073895e-06, "epoch": 0.3991228070175439, "percentage": 39.91, "elapsed_time": "0:18:46", "remaining_time": "0:28:15"}
92
+ {"current_steps": 92, "total_steps": 228, "loss": 1.0062, "lr": 6.773024435212678e-06, "epoch": 0.40350877192982454, "percentage": 40.35, "elapsed_time": "0:18:57", "remaining_time": "0:28:01"}
93
+ {"current_steps": 93, "total_steps": 228, "loss": 1.0101, "lr": 6.706389611284953e-06, "epoch": 0.40789473684210525, "percentage": 40.79, "elapsed_time": "0:19:10", "remaining_time": "0:27:49"}
94
+ {"current_steps": 94, "total_steps": 228, "loss": 1.0208, "lr": 6.639409972410446e-06, "epoch": 0.41228070175438597, "percentage": 41.23, "elapsed_time": "0:19:21", "remaining_time": "0:27:36"}
95
+ {"current_steps": 95, "total_steps": 228, "loss": 1.0014, "lr": 6.57209905335312e-06, "epoch": 0.4166666666666667, "percentage": 41.67, "elapsed_time": "0:19:34", "remaining_time": "0:27:24"}
96
+ {"current_steps": 96, "total_steps": 228, "loss": 0.9726, "lr": 6.504470455819651e-06, "epoch": 0.42105263157894735, "percentage": 42.11, "elapsed_time": "0:19:45", "remaining_time": "0:27:10"}
97
+ {"current_steps": 97, "total_steps": 228, "loss": 1.0462, "lr": 6.436537845710904e-06, "epoch": 0.42543859649122806, "percentage": 42.54, "elapsed_time": "0:19:58", "remaining_time": "0:26:58"}
98
+ {"current_steps": 98, "total_steps": 228, "loss": 0.997, "lr": 6.368314950360416e-06, "epoch": 0.4298245614035088, "percentage": 42.98, "elapsed_time": "0:20:10", "remaining_time": "0:26:45"}
99
+ {"current_steps": 99, "total_steps": 228, "loss": 1.0351, "lr": 6.299815555760478e-06, "epoch": 0.4342105263157895, "percentage": 43.42, "elapsed_time": "0:20:24", "remaining_time": "0:26:35"}
100
+ {"current_steps": 100, "total_steps": 228, "loss": 1.04, "lr": 6.231053503776363e-06, "epoch": 0.43859649122807015, "percentage": 43.86, "elapsed_time": "0:20:36", "remaining_time": "0:26:22"}
101
+ {"current_steps": 101, "total_steps": 228, "loss": 0.9802, "lr": 6.1620426893492645e-06, "epoch": 0.44298245614035087, "percentage": 44.3, "elapsed_time": "0:20:48", "remaining_time": "0:26:09"}
102
+ {"current_steps": 102, "total_steps": 228, "loss": 1.006, "lr": 6.092797057688496e-06, "epoch": 0.4473684210526316, "percentage": 44.74, "elapsed_time": "0:21:01", "remaining_time": "0:25:57"}
103
+ {"current_steps": 103, "total_steps": 228, "loss": 0.9833, "lr": 6.0233306014535505e-06, "epoch": 0.4517543859649123, "percentage": 45.18, "elapsed_time": "0:21:12", "remaining_time": "0:25:44"}
104
+ {"current_steps": 104, "total_steps": 228, "loss": 1.0409, "lr": 5.953657357926569e-06, "epoch": 0.45614035087719296, "percentage": 45.61, "elapsed_time": "0:21:24", "remaining_time": "0:25:31"}
105
+ {"current_steps": 105, "total_steps": 228, "loss": 1.0191, "lr": 5.883791406175775e-06, "epoch": 0.4605263157894737, "percentage": 46.05, "elapsed_time": "0:21:37", "remaining_time": "0:25:19"}
106
+ {"current_steps": 106, "total_steps": 228, "loss": 0.9992, "lr": 5.813746864210489e-06, "epoch": 0.4649122807017544, "percentage": 46.49, "elapsed_time": "0:21:50", "remaining_time": "0:25:08"}
107
+ {"current_steps": 107, "total_steps": 228, "loss": 1.0023, "lr": 5.743537886128258e-06, "epoch": 0.4692982456140351, "percentage": 46.93, "elapsed_time": "0:22:02", "remaining_time": "0:24:55"}
108
+ {"current_steps": 108, "total_steps": 228, "loss": 0.9909, "lr": 5.673178659254698e-06, "epoch": 0.47368421052631576, "percentage": 47.37, "elapsed_time": "0:22:14", "remaining_time": "0:24:42"}
109
+ {"current_steps": 109, "total_steps": 228, "loss": 0.9598, "lr": 5.6026834012766155e-06, "epoch": 0.4780701754385965, "percentage": 47.81, "elapsed_time": "0:22:25", "remaining_time": "0:24:29"}
110
+ {"current_steps": 110, "total_steps": 228, "loss": 0.9879, "lr": 5.532066357369012e-06, "epoch": 0.4824561403508772, "percentage": 48.25, "elapsed_time": "0:22:37", "remaining_time": "0:24:16"}
111
+ {"current_steps": 111, "total_steps": 228, "loss": 1.0184, "lr": 5.46134179731651e-06, "epoch": 0.4868421052631579, "percentage": 48.68, "elapsed_time": "0:22:50", "remaining_time": "0:24:04"}
112
+ {"current_steps": 112, "total_steps": 228, "loss": 1.0496, "lr": 5.390524012629824e-06, "epoch": 0.49122807017543857, "percentage": 49.12, "elapsed_time": "0:23:02", "remaining_time": "0:23:51"}
113
+ {"current_steps": 113, "total_steps": 228, "loss": 1.0399, "lr": 5.319627313657829e-06, "epoch": 0.4956140350877193, "percentage": 49.56, "elapsed_time": "0:23:14", "remaining_time": "0:23:39"}
114
+ {"current_steps": 114, "total_steps": 228, "loss": 0.9906, "lr": 5.248666026695835e-06, "epoch": 0.5, "percentage": 50.0, "elapsed_time": "0:23:26", "remaining_time": "0:23:26"}
115
+ {"current_steps": 115, "total_steps": 228, "loss": 0.9836, "lr": 5.177654491090627e-06, "epoch": 0.5043859649122807, "percentage": 50.44, "elapsed_time": "0:23:37", "remaining_time": "0:23:13"}
116
+ {"current_steps": 116, "total_steps": 228, "loss": 1.0083, "lr": 5.1066070563428736e-06, "epoch": 0.5087719298245614, "percentage": 50.88, "elapsed_time": "0:23:49", "remaining_time": "0:23:00"}
117
+ {"current_steps": 117, "total_steps": 228, "loss": 1.0034, "lr": 5.035538079207488e-06, "epoch": 0.5131578947368421, "percentage": 51.32, "elapsed_time": "0:24:01", "remaining_time": "0:22:47"}
118
+ {"current_steps": 118, "total_steps": 228, "loss": 1.0244, "lr": 4.964461920792512e-06, "epoch": 0.5175438596491229, "percentage": 51.75, "elapsed_time": "0:24:13", "remaining_time": "0:22:35"}
119
+ {"current_steps": 119, "total_steps": 228, "loss": 0.9473, "lr": 4.893392943657127e-06, "epoch": 0.5219298245614035, "percentage": 52.19, "elapsed_time": "0:24:26", "remaining_time": "0:22:22"}
120
+ {"current_steps": 120, "total_steps": 228, "loss": 0.93, "lr": 4.822345508909376e-06, "epoch": 0.5263157894736842, "percentage": 52.63, "elapsed_time": "0:24:40", "remaining_time": "0:22:12"}
121
+ {"current_steps": 121, "total_steps": 228, "loss": 1.0262, "lr": 4.751333973304166e-06, "epoch": 0.5307017543859649, "percentage": 53.07, "elapsed_time": "0:24:53", "remaining_time": "0:22:00"}
122
+ {"current_steps": 122, "total_steps": 228, "loss": 1.0236, "lr": 4.680372686342173e-06, "epoch": 0.5350877192982456, "percentage": 53.51, "elapsed_time": "0:25:05", "remaining_time": "0:21:48"}
123
+ {"current_steps": 123, "total_steps": 228, "loss": 0.9891, "lr": 4.609475987370177e-06, "epoch": 0.5394736842105263, "percentage": 53.95, "elapsed_time": "0:25:17", "remaining_time": "0:21:35"}
124
+ {"current_steps": 124, "total_steps": 228, "loss": 0.9761, "lr": 4.53865820268349e-06, "epoch": 0.543859649122807, "percentage": 54.39, "elapsed_time": "0:25:29", "remaining_time": "0:21:22"}
125
+ {"current_steps": 125, "total_steps": 228, "loss": 0.9847, "lr": 4.467933642630989e-06, "epoch": 0.5482456140350878, "percentage": 54.82, "elapsed_time": "0:25:41", "remaining_time": "0:21:10"}
126
+ {"current_steps": 126, "total_steps": 228, "loss": 0.9916, "lr": 4.397316598723385e-06, "epoch": 0.5526315789473685, "percentage": 55.26, "elapsed_time": "0:25:53", "remaining_time": "0:20:57"}
127
+ {"current_steps": 127, "total_steps": 228, "loss": 0.9535, "lr": 4.326821340745304e-06, "epoch": 0.5570175438596491, "percentage": 55.7, "elapsed_time": "0:26:05", "remaining_time": "0:20:45"}
128
+ {"current_steps": 128, "total_steps": 228, "loss": 0.9249, "lr": 4.256462113871741e-06, "epoch": 0.5614035087719298, "percentage": 56.14, "elapsed_time": "0:26:17", "remaining_time": "0:20:32"}
129
+ {"current_steps": 129, "total_steps": 228, "loss": 0.9885, "lr": 4.186253135789511e-06, "epoch": 0.5657894736842105, "percentage": 56.58, "elapsed_time": "0:26:28", "remaining_time": "0:20:19"}
130
+ {"current_steps": 130, "total_steps": 228, "loss": 1.0087, "lr": 4.116208593824227e-06, "epoch": 0.5701754385964912, "percentage": 57.02, "elapsed_time": "0:26:40", "remaining_time": "0:20:06"}
131
+ {"current_steps": 131, "total_steps": 228, "loss": 0.9654, "lr": 4.046342642073433e-06, "epoch": 0.5745614035087719, "percentage": 57.46, "elapsed_time": "0:26:52", "remaining_time": "0:19:53"}
132
+ {"current_steps": 132, "total_steps": 228, "loss": 0.991, "lr": 3.976669398546451e-06, "epoch": 0.5789473684210527, "percentage": 57.89, "elapsed_time": "0:27:03", "remaining_time": "0:19:40"}
133
+ {"current_steps": 133, "total_steps": 228, "loss": 0.9702, "lr": 3.907202942311506e-06, "epoch": 0.5833333333333334, "percentage": 58.33, "elapsed_time": "0:27:15", "remaining_time": "0:19:27"}
134
+ {"current_steps": 134, "total_steps": 228, "loss": 0.9451, "lr": 3.837957310650738e-06, "epoch": 0.5877192982456141, "percentage": 58.77, "elapsed_time": "0:27:27", "remaining_time": "0:19:15"}
135
+ {"current_steps": 135, "total_steps": 228, "loss": 0.9498, "lr": 3.7689464962236367e-06, "epoch": 0.5921052631578947, "percentage": 59.21, "elapsed_time": "0:27:39", "remaining_time": "0:19:03"}
136
+ {"current_steps": 136, "total_steps": 228, "loss": 0.9828, "lr": 3.700184444239524e-06, "epoch": 0.5964912280701754, "percentage": 59.65, "elapsed_time": "0:27:51", "remaining_time": "0:18:50"}
137
+ {"current_steps": 137, "total_steps": 228, "loss": 0.956, "lr": 3.6316850496395863e-06, "epoch": 0.6008771929824561, "percentage": 60.09, "elapsed_time": "0:28:03", "remaining_time": "0:18:38"}
138
+ {"current_steps": 138, "total_steps": 228, "loss": 0.9553, "lr": 3.563462154289098e-06, "epoch": 0.6052631578947368, "percentage": 60.53, "elapsed_time": "0:28:14", "remaining_time": "0:18:24"}
139
+ {"current_steps": 139, "total_steps": 228, "loss": 1.0269, "lr": 3.49552954418035e-06, "epoch": 0.6096491228070176, "percentage": 60.96, "elapsed_time": "0:28:28", "remaining_time": "0:18:13"}
140
+ {"current_steps": 140, "total_steps": 228, "loss": 0.9888, "lr": 3.4279009466468825e-06, "epoch": 0.6140350877192983, "percentage": 61.4, "elapsed_time": "0:28:40", "remaining_time": "0:18:01"}
141
+ {"current_steps": 141, "total_steps": 228, "loss": 1.0003, "lr": 3.3605900275895565e-06, "epoch": 0.618421052631579, "percentage": 61.84, "elapsed_time": "0:28:52", "remaining_time": "0:17:48"}
142
+ {"current_steps": 142, "total_steps": 228, "loss": 0.9982, "lr": 3.2936103887150484e-06, "epoch": 0.6228070175438597, "percentage": 62.28, "elapsed_time": "0:29:04", "remaining_time": "0:17:36"}
143
+ {"current_steps": 143, "total_steps": 228, "loss": 0.9645, "lr": 3.226975564787322e-06, "epoch": 0.6271929824561403, "percentage": 62.72, "elapsed_time": "0:29:16", "remaining_time": "0:17:24"}
144
+ {"current_steps": 144, "total_steps": 228, "loss": 0.9443, "lr": 3.1606990208926125e-06, "epoch": 0.631578947368421, "percentage": 63.16, "elapsed_time": "0:29:28", "remaining_time": "0:17:11"}
145
+ {"current_steps": 145, "total_steps": 228, "loss": 1.0069, "lr": 3.0947941497184985e-06, "epoch": 0.6359649122807017, "percentage": 63.6, "elapsed_time": "0:29:40", "remaining_time": "0:16:59"}
146
+ {"current_steps": 146, "total_steps": 228, "loss": 0.9718, "lr": 3.0292742688476125e-06, "epoch": 0.6403508771929824, "percentage": 64.04, "elapsed_time": "0:29:52", "remaining_time": "0:16:46"}
147
+ {"current_steps": 147, "total_steps": 228, "loss": 0.9981, "lr": 2.964152618066508e-06, "epoch": 0.6447368421052632, "percentage": 64.47, "elapsed_time": "0:30:03", "remaining_time": "0:16:33"}
148
+ {"current_steps": 148, "total_steps": 228, "loss": 0.994, "lr": 2.899442356690271e-06, "epoch": 0.6491228070175439, "percentage": 64.91, "elapsed_time": "0:30:16", "remaining_time": "0:16:21"}
149
+ {"current_steps": 149, "total_steps": 228, "loss": 0.9605, "lr": 2.835156560903365e-06, "epoch": 0.6535087719298246, "percentage": 65.35, "elapsed_time": "0:30:26", "remaining_time": "0:16:08"}
150
+ {"current_steps": 150, "total_steps": 228, "loss": 0.9965, "lr": 2.771308221117309e-06, "epoch": 0.6578947368421053, "percentage": 65.79, "elapsed_time": "0:30:38", "remaining_time": "0:15:55"}
151
+ {"current_steps": 151, "total_steps": 228, "loss": 1.0161, "lr": 2.7079102393456503e-06, "epoch": 0.6622807017543859, "percentage": 66.23, "elapsed_time": "0:30:49", "remaining_time": "0:15:43"}
152
+ {"current_steps": 152, "total_steps": 228, "loss": 0.9696, "lr": 2.6449754265968263e-06, "epoch": 0.6666666666666666, "percentage": 66.67, "elapsed_time": "0:31:01", "remaining_time": "0:15:30"}
153
+ {"current_steps": 153, "total_steps": 228, "loss": 0.9674, "lr": 2.5825165002854124e-06, "epoch": 0.6710526315789473, "percentage": 67.11, "elapsed_time": "0:31:13", "remaining_time": "0:15:18"}
154
+ {"current_steps": 154, "total_steps": 228, "loss": 0.9933, "lr": 2.5205460816622684e-06, "epoch": 0.6754385964912281, "percentage": 67.54, "elapsed_time": "0:31:24", "remaining_time": "0:15:05"}
155
+ {"current_steps": 155, "total_steps": 228, "loss": 0.977, "lr": 2.4590766932641353e-06, "epoch": 0.6798245614035088, "percentage": 67.98, "elapsed_time": "0:31:36", "remaining_time": "0:14:53"}
156
+ {"current_steps": 156, "total_steps": 228, "loss": 0.9729, "lr": 2.3981207563831633e-06, "epoch": 0.6842105263157895, "percentage": 68.42, "elapsed_time": "0:31:48", "remaining_time": "0:14:41"}
157
+ {"current_steps": 157, "total_steps": 228, "loss": 0.914, "lr": 2.3376905885569185e-06, "epoch": 0.6885964912280702, "percentage": 68.86, "elapsed_time": "0:32:01", "remaining_time": "0:14:28"}
158
+ {"current_steps": 158, "total_steps": 228, "loss": 0.9641, "lr": 2.2777984010793264e-06, "epoch": 0.6929824561403509, "percentage": 69.3, "elapsed_time": "0:32:12", "remaining_time": "0:14:16"}
159
+ {"current_steps": 159, "total_steps": 228, "loss": 0.9074, "lr": 2.2184562965331203e-06, "epoch": 0.6973684210526315, "percentage": 69.74, "elapsed_time": "0:32:25", "remaining_time": "0:14:04"}
160
+ {"current_steps": 160, "total_steps": 228, "loss": 0.938, "lr": 2.159676266344222e-06, "epoch": 0.7017543859649122, "percentage": 70.18, "elapsed_time": "0:32:38", "remaining_time": "0:13:52"}
161
+ {"current_steps": 161, "total_steps": 228, "loss": 0.9387, "lr": 2.1014701883586087e-06, "epoch": 0.706140350877193, "percentage": 70.61, "elapsed_time": "0:32:51", "remaining_time": "0:13:40"}
162
+ {"current_steps": 162, "total_steps": 228, "loss": 0.9931, "lr": 2.043849824442124e-06, "epoch": 0.7105263157894737, "percentage": 71.05, "elapsed_time": "0:33:03", "remaining_time": "0:13:28"}
163
+ {"current_steps": 163, "total_steps": 228, "loss": 0.9578, "lr": 1.9868268181037186e-06, "epoch": 0.7149122807017544, "percentage": 71.49, "elapsed_time": "0:33:15", "remaining_time": "0:13:15"}
164
+ {"current_steps": 164, "total_steps": 228, "loss": 0.9615, "lr": 1.9304126921426235e-06, "epoch": 0.7192982456140351, "percentage": 71.93, "elapsed_time": "0:33:27", "remaining_time": "0:13:03"}
165
+ {"current_steps": 165, "total_steps": 228, "loss": 0.952, "lr": 1.8746188463198983e-06, "epoch": 0.7236842105263158, "percentage": 72.37, "elapsed_time": "0:33:40", "remaining_time": "0:12:51"}
166
+ {"current_steps": 166, "total_steps": 228, "loss": 0.9422, "lr": 1.8194565550548477e-06, "epoch": 0.7280701754385965, "percentage": 72.81, "elapsed_time": "0:33:53", "remaining_time": "0:12:39"}
167
+ {"current_steps": 167, "total_steps": 228, "loss": 0.9556, "lr": 1.764936965146773e-06, "epoch": 0.7324561403508771, "percentage": 73.25, "elapsed_time": "0:34:05", "remaining_time": "0:12:26"}
168
+ {"current_steps": 168, "total_steps": 228, "loss": 0.9609, "lr": 1.7110710935225055e-06, "epoch": 0.7368421052631579, "percentage": 73.68, "elapsed_time": "0:34:17", "remaining_time": "0:12:14"}
169
+ {"current_steps": 169, "total_steps": 228, "loss": 0.9945, "lr": 1.6578698250101828e-06, "epoch": 0.7412280701754386, "percentage": 74.12, "elapsed_time": "0:34:29", "remaining_time": "0:12:02"}
170
+ {"current_steps": 170, "total_steps": 228, "loss": 0.9526, "lr": 1.6053439101397257e-06, "epoch": 0.7456140350877193, "percentage": 74.56, "elapsed_time": "0:34:42", "remaining_time": "0:11:50"}
171
+ {"current_steps": 171, "total_steps": 228, "loss": 0.953, "lr": 1.5535039629704467e-06, "epoch": 0.75, "percentage": 75.0, "elapsed_time": "0:34:54", "remaining_time": "0:11:38"}
172
+ {"current_steps": 172, "total_steps": 228, "loss": 0.9418, "lr": 1.502360458946232e-06, "epoch": 0.7543859649122807, "percentage": 75.44, "elapsed_time": "0:35:07", "remaining_time": "0:11:26"}
173
+ {"current_steps": 173, "total_steps": 228, "loss": 0.9842, "lr": 1.451923732778745e-06, "epoch": 0.7587719298245614, "percentage": 75.88, "elapsed_time": "0:35:19", "remaining_time": "0:11:13"}
174
+ {"current_steps": 174, "total_steps": 228, "loss": 0.9483, "lr": 1.4022039763590595e-06, "epoch": 0.7631578947368421, "percentage": 76.32, "elapsed_time": "0:35:32", "remaining_time": "0:11:01"}
175
+ {"current_steps": 175, "total_steps": 228, "loss": 1.0225, "lr": 1.3532112366981598e-06, "epoch": 0.7675438596491229, "percentage": 76.75, "elapsed_time": "0:35:45", "remaining_time": "0:10:49"}
176
+ {"current_steps": 176, "total_steps": 228, "loss": 0.9787, "lr": 1.3049554138967052e-06, "epoch": 0.7719298245614035, "percentage": 77.19, "elapsed_time": "0:35:57", "remaining_time": "0:10:37"}
177
+ {"current_steps": 177, "total_steps": 228, "loss": 0.9812, "lr": 1.257446259144494e-06, "epoch": 0.7763157894736842, "percentage": 77.63, "elapsed_time": "0:36:09", "remaining_time": "0:10:25"}
178
+ {"current_steps": 178, "total_steps": 228, "loss": 0.9686, "lr": 1.210693372750017e-06, "epoch": 0.7807017543859649, "percentage": 78.07, "elapsed_time": "0:36:21", "remaining_time": "0:10:12"}
179
+ {"current_steps": 179, "total_steps": 228, "loss": 0.9909, "lr": 1.1647062022004845e-06, "epoch": 0.7850877192982456, "percentage": 78.51, "elapsed_time": "0:36:32", "remaining_time": "0:10:00"}
180
+ {"current_steps": 180, "total_steps": 228, "loss": 0.9682, "lr": 1.1194940402527566e-06, "epoch": 0.7894736842105263, "percentage": 78.95, "elapsed_time": "0:36:44", "remaining_time": "0:09:47"}
181
+ {"current_steps": 181, "total_steps": 228, "loss": 0.9218, "lr": 1.075066023055527e-06, "epoch": 0.793859649122807, "percentage": 79.39, "elapsed_time": "0:36:57", "remaining_time": "0:09:35"}
182
+ {"current_steps": 182, "total_steps": 228, "loss": 0.9753, "lr": 1.0314311283031531e-06, "epoch": 0.7982456140350878, "percentage": 79.82, "elapsed_time": "0:37:10", "remaining_time": "0:09:23"}
183
+ {"current_steps": 183, "total_steps": 228, "loss": 0.9746, "lr": 9.885981734215094e-07, "epoch": 0.8026315789473685, "percentage": 80.26, "elapsed_time": "0:37:23", "remaining_time": "0:09:11"}
184
+ {"current_steps": 184, "total_steps": 228, "loss": 0.9479, "lr": 9.465758137862264e-07, "epoch": 0.8070175438596491, "percentage": 80.7, "elapsed_time": "0:37:35", "remaining_time": "0:08:59"}
185
+ {"current_steps": 185, "total_steps": 228, "loss": 0.9734, "lr": 9.053725409736752e-07, "epoch": 0.8114035087719298, "percentage": 81.14, "elapsed_time": "0:37:48", "remaining_time": "0:08:47"}
186
+ {"current_steps": 186, "total_steps": 228, "loss": 0.9145, "lr": 8.649966810450472e-07, "epoch": 0.8157894736842105, "percentage": 81.58, "elapsed_time": "0:38:01", "remaining_time": "0:08:35"}
187
+ {"current_steps": 187, "total_steps": 228, "loss": 0.9302, "lr": 8.254563928638892e-07, "epoch": 0.8201754385964912, "percentage": 82.02, "elapsed_time": "0:38:14", "remaining_time": "0:08:23"}
188
+ {"current_steps": 188, "total_steps": 228, "loss": 0.9509, "lr": 7.86759666447412e-07, "epoch": 0.8245614035087719, "percentage": 82.46, "elapsed_time": "0:38:26", "remaining_time": "0:08:10"}
189
+ {"current_steps": 189, "total_steps": 228, "loss": 0.9081, "lr": 7.489143213519301e-07, "epoch": 0.8289473684210527, "percentage": 82.89, "elapsed_time": "0:38:38", "remaining_time": "0:07:58"}
190
+ {"current_steps": 190, "total_steps": 228, "loss": 0.9404, "lr": 7.119280050927407e-07, "epoch": 0.8333333333333334, "percentage": 83.33, "elapsed_time": "0:38:49", "remaining_time": "0:07:45"}
191
+ {"current_steps": 191, "total_steps": 228, "loss": 0.9482, "lr": 6.758081915987669e-07, "epoch": 0.8377192982456141, "percentage": 83.77, "elapsed_time": "0:39:02", "remaining_time": "0:07:33"}
192
+ {"current_steps": 192, "total_steps": 228, "loss": 0.9161, "lr": 6.405621797022848e-07, "epoch": 0.8421052631578947, "percentage": 84.21, "elapsed_time": "0:39:13", "remaining_time": "0:07:21"}
193
+ {"current_steps": 193, "total_steps": 228, "loss": 0.9351, "lr": 6.061970916640236e-07, "epoch": 0.8464912280701754, "percentage": 84.65, "elapsed_time": "0:39:25", "remaining_time": "0:07:08"}
194
+ {"current_steps": 194, "total_steps": 228, "loss": 0.9633, "lr": 5.727198717339511e-07, "epoch": 0.8508771929824561, "percentage": 85.09, "elapsed_time": "0:39:39", "remaining_time": "0:06:56"}
195
+ {"current_steps": 195, "total_steps": 228, "loss": 0.9923, "lr": 5.401372847480285e-07, "epoch": 0.8552631578947368, "percentage": 85.53, "elapsed_time": "0:39:50", "remaining_time": "0:06:44"}
196
+ {"current_steps": 196, "total_steps": 228, "loss": 0.9049, "lr": 5.084559147612244e-07, "epoch": 0.8596491228070176, "percentage": 85.96, "elapsed_time": "0:40:01", "remaining_time": "0:06:32"}
197
+ {"current_steps": 197, "total_steps": 228, "loss": 0.9344, "lr": 4.776821637170525e-07, "epoch": 0.8640350877192983, "percentage": 86.4, "elapsed_time": "0:40:14", "remaining_time": "0:06:19"}
198
+ {"current_steps": 198, "total_steps": 228, "loss": 0.9594, "lr": 4.4782225015391754e-07, "epoch": 0.868421052631579, "percentage": 86.84, "elapsed_time": "0:40:25", "remaining_time": "0:06:07"}
199
+ {"current_steps": 199, "total_steps": 228, "loss": 0.9519, "lr": 4.1888220794851386e-07, "epoch": 0.8728070175438597, "percentage": 87.28, "elapsed_time": "0:40:37", "remaining_time": "0:05:55"}
200
+ {"current_steps": 200, "total_steps": 228, "loss": 0.9539, "lr": 3.908678850965425e-07, "epoch": 0.8771929824561403, "percentage": 87.72, "elapsed_time": "0:40:49", "remaining_time": "0:05:42"}
201
+ {"current_steps": 201, "total_steps": 228, "loss": 0.9506, "lr": 3.6378494253099307e-07, "epoch": 0.881578947368421, "percentage": 88.16, "elapsed_time": "0:41:01", "remaining_time": "0:05:30"}
202
+ {"current_steps": 202, "total_steps": 228, "loss": 0.9309, "lr": 3.3763885297822153e-07, "epoch": 0.8859649122807017, "percentage": 88.6, "elapsed_time": "0:41:14", "remaining_time": "0:05:18"}
203
+ {"current_steps": 203, "total_steps": 228, "loss": 0.9497, "lr": 3.1243489985206097e-07, "epoch": 0.8903508771929824, "percentage": 89.04, "elapsed_time": "0:41:27", "remaining_time": "0:05:06"}
204
+ {"current_steps": 204, "total_steps": 228, "loss": 0.9707, "lr": 2.8817817618618846e-07, "epoch": 0.8947368421052632, "percentage": 89.47, "elapsed_time": "0:41:40", "remaining_time": "0:04:54"}
205
+ {"current_steps": 205, "total_steps": 228, "loss": 0.953, "lr": 2.648735836049615e-07, "epoch": 0.8991228070175439, "percentage": 89.91, "elapsed_time": "0:41:53", "remaining_time": "0:04:42"}
206
+ {"current_steps": 206, "total_steps": 228, "loss": 0.9465, "lr": 2.4252583133292927e-07, "epoch": 0.9035087719298246, "percentage": 90.35, "elapsed_time": "0:42:05", "remaining_time": "0:04:29"}
207
+ {"current_steps": 207, "total_steps": 228, "loss": 0.9527, "lr": 2.2113943524323167e-07, "epoch": 0.9078947368421053, "percentage": 90.79, "elapsed_time": "0:42:18", "remaining_time": "0:04:17"}
208
+ {"current_steps": 208, "total_steps": 228, "loss": 0.9498, "lr": 2.007187169450603e-07, "epoch": 0.9122807017543859, "percentage": 91.23, "elapsed_time": "0:42:29", "remaining_time": "0:04:05"}
209
+ {"current_steps": 209, "total_steps": 228, "loss": 0.9582, "lr": 1.8126780291038037e-07, "epoch": 0.9166666666666666, "percentage": 91.67, "elapsed_time": "0:42:43", "remaining_time": "0:03:53"}
210
+ {"current_steps": 210, "total_steps": 228, "loss": 0.9169, "lr": 1.6279062364008446e-07, "epoch": 0.9210526315789473, "percentage": 92.11, "elapsed_time": "0:42:54", "remaining_time": "0:03:40"}
211
+ {"current_steps": 211, "total_steps": 228, "loss": 0.9789, "lr": 1.4529091286973994e-07, "epoch": 0.9254385964912281, "percentage": 92.54, "elapsed_time": "0:43:07", "remaining_time": "0:03:28"}
212
+ {"current_steps": 212, "total_steps": 228, "loss": 0.9705, "lr": 1.2877220681510927e-07, "epoch": 0.9298245614035088, "percentage": 92.98, "elapsed_time": "0:43:20", "remaining_time": "0:03:16"}
213
+ {"current_steps": 213, "total_steps": 228, "loss": 0.9249, "lr": 1.1323784345757205e-07, "epoch": 0.9342105263157895, "percentage": 93.42, "elapsed_time": "0:43:32", "remaining_time": "0:03:03"}
214
+ {"current_steps": 214, "total_steps": 228, "loss": 0.9967, "lr": 9.869096186961025e-08, "epoch": 0.9385964912280702, "percentage": 93.86, "elapsed_time": "0:43:43", "remaining_time": "0:02:51"}
215
+ {"current_steps": 215, "total_steps": 228, "loss": 0.9782, "lr": 8.513450158049109e-08, "epoch": 0.9429824561403509, "percentage": 94.3, "elapsed_time": "0:43:57", "remaining_time": "0:02:39"}
216
+ {"current_steps": 216, "total_steps": 228, "loss": 0.9853, "lr": 7.257120198226219e-08, "epoch": 0.9473684210526315, "percentage": 94.74, "elapsed_time": "0:44:10", "remaining_time": "0:02:27"}
217
+ {"current_steps": 217, "total_steps": 228, "loss": 0.8929, "lr": 6.100360177619946e-08, "epoch": 0.9517543859649122, "percentage": 95.18, "elapsed_time": "0:44:23", "remaining_time": "0:02:14"}
218
+ {"current_steps": 218, "total_steps": 228, "loss": 0.9297, "lr": 5.0434038459801213e-08, "epoch": 0.956140350877193, "percentage": 95.61, "elapsed_time": "0:44:36", "remaining_time": "0:02:02"}
219
+ {"current_steps": 219, "total_steps": 228, "loss": 0.9525, "lr": 4.086464785444777e-08, "epoch": 0.9605263157894737, "percentage": 96.05, "elapsed_time": "0:44:48", "remaining_time": "0:01:50"}
220
+ {"current_steps": 220, "total_steps": 228, "loss": 0.9839, "lr": 3.229736367380498e-08, "epoch": 0.9649122807017544, "percentage": 96.49, "elapsed_time": "0:45:00", "remaining_time": "0:01:38"}
221
+ {"current_steps": 221, "total_steps": 228, "loss": 0.9349, "lr": 2.4733917133077378e-08, "epoch": 0.9692982456140351, "percentage": 96.93, "elapsed_time": "0:45:13", "remaining_time": "0:01:25"}
222
+ {"current_steps": 222, "total_steps": 228, "loss": 0.9716, "lr": 1.8175836599173545e-08, "epoch": 0.9736842105263158, "percentage": 97.37, "elapsed_time": "0:45:25", "remaining_time": "0:01:13"}
223
+ {"current_steps": 223, "total_steps": 228, "loss": 0.9661, "lr": 1.2624447281867625e-08, "epoch": 0.9780701754385965, "percentage": 97.81, "elapsed_time": "0:45:36", "remaining_time": "0:01:01"}
224
+ {"current_steps": 224, "total_steps": 228, "loss": 0.9614, "lr": 8.080870966008513e-09, "epoch": 0.9824561403508771, "percentage": 98.25, "elapsed_time": "0:45:48", "remaining_time": "0:00:49"}
225
+ {"current_steps": 225, "total_steps": 228, "loss": 0.9655, "lr": 4.546025784837316e-09, "epoch": 0.9868421052631579, "percentage": 98.68, "elapsed_time": "0:46:02", "remaining_time": "0:00:36"}
226
+ {"current_steps": 226, "total_steps": 228, "loss": 0.9867, "lr": 2.0206260344590724e-09, "epoch": 0.9912280701754386, "percentage": 99.12, "elapsed_time": "0:46:12", "remaining_time": "0:00:24"}
227
+ {"current_steps": 227, "total_steps": 228, "loss": 0.9372, "lr": 5.051820295032262e-10, "epoch": 0.9956140350877193, "percentage": 99.56, "elapsed_time": "0:46:23", "remaining_time": "0:00:12"}
228
+ {"current_steps": 228, "total_steps": 228, "loss": 0.9237, "lr": 0.0, "epoch": 1.0, "percentage": 100.0, "elapsed_time": "0:46:37", "remaining_time": "0:00:00"}
229
+ {"current_steps": 228, "total_steps": 228, "epoch": 1.0, "percentage": 100.0, "elapsed_time": "0:47:29", "remaining_time": "0:00:00"}
trainer_state.json ADDED
@@ -0,0 +1,1638 @@
1
+ {
2
+ "best_metric": null,
3
+ "best_model_checkpoint": null,
4
+ "epoch": 1.0,
5
+ "eval_steps": 500,
6
+ "global_step": 228,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.0043859649122807015,
13
+ "grad_norm": 49.592716217041016,
14
+ "learning_rate": 1.4285714285714286e-06,
15
+ "loss": 1.2955,
16
+ "step": 1
17
+ },
18
+ {
19
+ "epoch": 0.008771929824561403,
20
+ "grad_norm": 52.511619567871094,
21
+ "learning_rate": 2.8571428571428573e-06,
22
+ "loss": 1.3459,
23
+ "step": 2
24
+ },
25
+ {
26
+ "epoch": 0.013157894736842105,
27
+ "grad_norm": 23.82323455810547,
28
+ "learning_rate": 4.2857142857142855e-06,
29
+ "loss": 1.2305,
30
+ "step": 3
31
+ },
32
+ {
33
+ "epoch": 0.017543859649122806,
34
+ "grad_norm": 11.829014778137207,
35
+ "learning_rate": 5.7142857142857145e-06,
36
+ "loss": 1.0947,
37
+ "step": 4
38
+ },
39
+ {
40
+ "epoch": 0.021929824561403508,
41
+ "grad_norm": 20.558698654174805,
42
+ "learning_rate": 7.1428571428571436e-06,
43
+ "loss": 1.0768,
44
+ "step": 5
45
+ },
46
+ {
47
+ "epoch": 0.02631578947368421,
48
+ "grad_norm": 14.469958305358887,
49
+ "learning_rate": 8.571428571428571e-06,
50
+ "loss": 1.0448,
51
+ "step": 6
52
+ },
53
+ {
54
+ "epoch": 0.03070175438596491,
55
+ "grad_norm": 7.883849620819092,
56
+ "learning_rate": 1e-05,
57
+ "loss": 1.0738,
58
+ "step": 7
59
+ },
60
+ {
61
+ "epoch": 0.03508771929824561,
62
+ "grad_norm": 7.827476978302002,
63
+ "learning_rate": 9.999494817970498e-06,
64
+ "loss": 1.0353,
65
+ "step": 8
66
+ },
67
+ {
68
+ "epoch": 0.039473684210526314,
69
+ "grad_norm": 8.651590347290039,
70
+ "learning_rate": 9.997979373965542e-06,
71
+ "loss": 0.9942,
72
+ "step": 9
73
+ },
74
+ {
75
+ "epoch": 0.043859649122807015,
76
+ "grad_norm": 6.32966423034668,
77
+ "learning_rate": 9.995453974215164e-06,
78
+ "loss": 1.0467,
79
+ "step": 10
80
+ },
81
+ {
82
+ "epoch": 0.04824561403508772,
83
+ "grad_norm": 5.549691677093506,
84
+ "learning_rate": 9.991919129033994e-06,
85
+ "loss": 0.9998,
86
+ "step": 11
87
+ },
88
+ {
89
+ "epoch": 0.05263157894736842,
90
+ "grad_norm": 5.89638614654541,
91
+ "learning_rate": 9.987375552718133e-06,
92
+ "loss": 0.9997,
93
+ "step": 12
94
+ },
95
+ {
96
+ "epoch": 0.05701754385964912,
97
+ "grad_norm": 6.868354320526123,
98
+ "learning_rate": 9.981824163400827e-06,
99
+ "loss": 1.0602,
100
+ "step": 13
101
+ },
102
+ {
103
+ "epoch": 0.06140350877192982,
104
+ "grad_norm": 5.4902496337890625,
105
+ "learning_rate": 9.975266082866923e-06,
106
+ "loss": 1.043,
107
+ "step": 14
108
+ },
109
+ {
110
+ "epoch": 0.06578947368421052,
111
+ "grad_norm": 5.782011032104492,
112
+ "learning_rate": 9.967702636326195e-06,
113
+ "loss": 1.0331,
114
+ "step": 15
115
+ },
116
+ {
117
+ "epoch": 0.07017543859649122,
118
+ "grad_norm": 6.002492427825928,
119
+ "learning_rate": 9.959135352145552e-06,
120
+ "loss": 1.0376,
121
+ "step": 16
122
+ },
123
+ {
124
+ "epoch": 0.07456140350877193,
125
+ "grad_norm": 5.092071056365967,
126
+ "learning_rate": 9.9495659615402e-06,
127
+ "loss": 1.0244,
128
+ "step": 17
129
+ },
130
+ {
131
+ "epoch": 0.07894736842105263,
132
+ "grad_norm": 5.242514133453369,
133
+ "learning_rate": 9.938996398223802e-06,
134
+ "loss": 0.9812,
135
+ "step": 18
136
+ },
137
+ {
138
+ "epoch": 0.08333333333333333,
139
+ "grad_norm": 5.504293918609619,
140
+ "learning_rate": 9.927428798017738e-06,
141
+ "loss": 1.0288,
142
+ "step": 19
143
+ },
144
+ {
145
+ "epoch": 0.08771929824561403,
146
+ "grad_norm": 5.18280029296875,
147
+ "learning_rate": 9.91486549841951e-06,
148
+ "loss": 1.0403,
149
+ "step": 20
150
+ },
151
+ {
152
+ "epoch": 0.09210526315789473,
153
+ "grad_norm": 5.409468173980713,
154
+ "learning_rate": 9.901309038130392e-06,
155
+ "loss": 1.046,
156
+ "step": 21
157
+ },
158
+ {
159
+ "epoch": 0.09649122807017543,
160
+ "grad_norm": 4.993797779083252,
161
+ "learning_rate": 9.886762156542428e-06,
162
+ "loss": 1.0609,
163
+ "step": 22
164
+ },
165
+ {
166
+ "epoch": 0.10087719298245613,
167
+ "grad_norm": 4.722639083862305,
168
+ "learning_rate": 9.871227793184893e-06,
169
+ "loss": 1.03,
170
+ "step": 23
171
+ },
172
+ {
173
+ "epoch": 0.10526315789473684,
174
+ "grad_norm": 5.58522367477417,
175
+ "learning_rate": 9.854709087130261e-06,
176
+ "loss": 1.0388,
177
+ "step": 24
178
+ },
179
+ {
180
+ "epoch": 0.10964912280701754,
181
+ "grad_norm": 5.170425891876221,
182
+ "learning_rate": 9.837209376359918e-06,
183
+ "loss": 1.0588,
184
+ "step": 25
185
+ },
186
+ {
187
+ "epoch": 0.11403508771929824,
188
+ "grad_norm": 5.433144569396973,
189
+ "learning_rate": 9.81873219708962e-06,
190
+ "loss": 1.0688,
191
+ "step": 26
192
+ },
193
+ {
194
+ "epoch": 0.11842105263157894,
195
+ "grad_norm": 4.71676778793335,
196
+ "learning_rate": 9.79928128305494e-06,
197
+ "loss": 1.0311,
198
+ "step": 27
199
+ },
200
+ {
201
+ "epoch": 0.12280701754385964,
202
+ "grad_norm": 4.437475681304932,
203
+ "learning_rate": 9.778860564756769e-06,
204
+ "loss": 1.0086,
205
+ "step": 28
206
+ },
207
+ {
208
+ "epoch": 0.12719298245614036,
209
+ "grad_norm": 5.034616947174072,
210
+ "learning_rate": 9.757474168667072e-06,
211
+ "loss": 1.0627,
212
+ "step": 29
213
+ },
214
+ {
215
+ "epoch": 0.13157894736842105,
216
+ "grad_norm": 4.68194055557251,
217
+ "learning_rate": 9.73512641639504e-06,
218
+ "loss": 1.0349,
219
+ "step": 30
220
+ },
221
+ {
222
+ "epoch": 0.13596491228070176,
223
+ "grad_norm": 4.9859700202941895,
224
+ "learning_rate": 9.711821823813812e-06,
225
+ "loss": 1.1047,
226
+ "step": 31
227
+ },
228
+ {
229
+ "epoch": 0.14035087719298245,
230
+ "grad_norm": 4.754051685333252,
231
+ "learning_rate": 9.68756510014794e-06,
232
+ "loss": 1.0646,
233
+ "step": 32
234
+ },
235
+ {
236
+ "epoch": 0.14473684210526316,
237
+ "grad_norm": 4.727588653564453,
238
+ "learning_rate": 9.66236114702178e-06,
239
+ "loss": 1.0211,
240
+ "step": 33
241
+ },
242
+ {
243
+ "epoch": 0.14912280701754385,
244
+ "grad_norm": 4.892414569854736,
245
+ "learning_rate": 9.636215057469009e-06,
246
+ "loss": 1.03,
247
+ "step": 34
248
+ },
249
+ {
250
+ "epoch": 0.15350877192982457,
251
+ "grad_norm": 4.797459602355957,
252
+ "learning_rate": 9.609132114903458e-06,
253
+ "loss": 1.0495,
254
+ "step": 35
255
+ },
256
+ {
257
+ "epoch": 0.15789473684210525,
258
+ "grad_norm": 4.844034194946289,
259
+ "learning_rate": 9.581117792051487e-06,
260
+ "loss": 1.0653,
261
+ "step": 36
262
+ },
263
+ {
264
+ "epoch": 0.16228070175438597,
265
+ "grad_norm": 4.3364057540893555,
266
+ "learning_rate": 9.552177749846083e-06,
267
+ "loss": 1.0085,
268
+ "step": 37
269
+ },
270
+ {
271
+ "epoch": 0.16666666666666666,
272
+ "grad_norm": 4.658485412597656,
273
+ "learning_rate": 9.522317836282949e-06,
274
+ "loss": 1.0548,
275
+ "step": 38
276
+ },
277
+ {
278
+ "epoch": 0.17105263157894737,
279
+ "grad_norm": 4.726075649261475,
280
+ "learning_rate": 9.491544085238778e-06,
281
+ "loss": 1.0704,
282
+ "step": 39
283
+ },
284
+ {
285
+ "epoch": 0.17543859649122806,
286
+ "grad_norm": 4.422685623168945,
287
+ "learning_rate": 9.459862715251973e-06,
288
+ "loss": 1.0463,
289
+ "step": 40
290
+ },
291
+ {
292
+ "epoch": 0.17982456140350878,
293
+ "grad_norm": 4.876737117767334,
294
+ "learning_rate": 9.427280128266049e-06,
295
+ "loss": 1.0771,
296
+ "step": 41
297
+ },
298
+ {
299
+ "epoch": 0.18421052631578946,
300
+ "grad_norm": 4.657575607299805,
301
+ "learning_rate": 9.393802908335978e-06,
302
+ "loss": 1.0615,
303
+ "step": 42
304
+ },
305
+ {
306
+ "epoch": 0.18859649122807018,
307
+ "grad_norm": 4.246818542480469,
308
+ "learning_rate": 9.359437820297716e-06,
309
+ "loss": 1.0431,
310
+ "step": 43
311
+ },
312
+ {
313
+ "epoch": 0.19298245614035087,
314
+ "grad_norm": 4.417186737060547,
315
+ "learning_rate": 9.324191808401235e-06,
316
+ "loss": 1.0271,
317
+ "step": 44
318
+ },
319
+ {
320
+ "epoch": 0.19736842105263158,
321
+ "grad_norm": 4.741846084594727,
322
+ "learning_rate": 9.288071994907262e-06,
323
+ "loss": 1.0765,
324
+ "step": 45
325
+ },
326
+ {
327
+ "epoch": 0.20175438596491227,
328
+ "grad_norm": 5.002007007598877,
329
+ "learning_rate": 9.251085678648072e-06,
330
+ "loss": 1.0526,
331
+ "step": 46
332
+ },
333
+ {
334
+ "epoch": 0.20614035087719298,
335
+ "grad_norm": 4.708237171173096,
336
+ "learning_rate": 9.213240333552589e-06,
337
+ "loss": 1.0502,
338
+ "step": 47
339
+ },
340
+ {
341
+ "epoch": 0.21052631578947367,
342
+ "grad_norm": 4.335841655731201,
343
+ "learning_rate": 9.174543607136111e-06,
344
+ "loss": 0.9878,
345
+ "step": 48
346
+ },
347
+ {
348
+ "epoch": 0.2149122807017544,
349
+ "grad_norm": 4.573909282684326,
350
+ "learning_rate": 9.135003318954954e-06,
351
+ "loss": 0.9989,
352
+ "step": 49
353
+ },
354
+ {
355
+ "epoch": 0.21929824561403508,
356
+ "grad_norm": 4.54152774810791,
357
+ "learning_rate": 9.094627459026326e-06,
358
+ "loss": 1.0816,
359
+ "step": 50
360
+ },
361
+ {
362
+ "epoch": 0.2236842105263158,
363
+ "grad_norm": 4.57687520980835,
364
+ "learning_rate": 9.053424186213776e-06,
365
+ "loss": 1.0509,
366
+ "step": 51
367
+ },
368
+ {
369
+ "epoch": 0.22807017543859648,
370
+ "grad_norm": 4.246875762939453,
371
+ "learning_rate": 9.011401826578492e-06,
372
+ "loss": 0.9849,
373
+ "step": 52
374
+ },
375
+ {
376
+ "epoch": 0.2324561403508772,
377
+ "grad_norm": 4.673925876617432,
378
+ "learning_rate": 8.968568871696847e-06,
379
+ "loss": 0.9975,
380
+ "step": 53
381
+ },
382
+ {
383
+ "epoch": 0.23684210526315788,
384
+ "grad_norm": 4.580467224121094,
385
+ "learning_rate": 8.924933976944474e-06,
386
+ "loss": 1.0747,
387
+ "step": 54
388
+ },
389
+ {
390
+ "epoch": 0.2412280701754386,
391
+ "grad_norm": 4.450971603393555,
392
+ "learning_rate": 8.880505959747245e-06,
393
+ "loss": 1.028,
394
+ "step": 55
395
+ },
396
+ {
397
+ "epoch": 0.24561403508771928,
398
+ "grad_norm": 4.807090759277344,
399
+ "learning_rate": 8.835293797799517e-06,
400
+ "loss": 0.9659,
401
+ "step": 56
402
+ },
403
+ {
404
+ "epoch": 0.25,
405
+ "grad_norm": 4.357285022735596,
406
+ "learning_rate": 8.789306627249985e-06,
407
+ "loss": 1.0228,
408
+ "step": 57
409
+ },
410
+ {
411
+ "epoch": 0.2543859649122807,
412
+ "grad_norm": 4.60875940322876,
413
+ "learning_rate": 8.742553740855507e-06,
414
+ "loss": 1.0338,
415
+ "step": 58
416
+ },
417
+ {
418
+ "epoch": 0.25877192982456143,
419
+ "grad_norm": 4.6360321044921875,
420
+ "learning_rate": 8.695044586103297e-06,
421
+ "loss": 1.0344,
422
+ "step": 59
423
+ },
424
+ {
425
+ "epoch": 0.2631578947368421,
426
+ "grad_norm": 4.603052139282227,
427
+ "learning_rate": 8.646788763301842e-06,
428
+ "loss": 1.0423,
429
+ "step": 60
430
+ },
431
+ {
432
+ "epoch": 0.2675438596491228,
433
+ "grad_norm": 4.477334976196289,
434
+ "learning_rate": 8.59779602364094e-06,
435
+ "loss": 1.0372,
436
+ "step": 61
437
+ },
438
+ {
439
+ "epoch": 0.2719298245614035,
440
+ "grad_norm": 4.819328784942627,
441
+ "learning_rate": 8.548076267221258e-06,
442
+ "loss": 1.0935,
443
+ "step": 62
444
+ },
445
+ {
446
+ "epoch": 0.27631578947368424,
447
+ "grad_norm": 4.351193428039551,
448
+ "learning_rate": 8.497639541053769e-06,
449
+ "loss": 0.9915,
450
+ "step": 63
451
+ },
452
+ {
453
+ "epoch": 0.2807017543859649,
454
+ "grad_norm": 9.282684326171875,
455
+ "learning_rate": 8.446496037029555e-06,
456
+ "loss": 1.0252,
457
+ "step": 64
458
+ },
459
+ {
460
+ "epoch": 0.2850877192982456,
461
+ "grad_norm": 4.520321369171143,
462
+ "learning_rate": 8.394656089860274e-06,
463
+ "loss": 0.9989,
464
+ "step": 65
465
+ },
466
+ {
467
+ "epoch": 0.2894736842105263,
468
+ "grad_norm": 4.605741500854492,
469
+ "learning_rate": 8.342130174989819e-06,
470
+ "loss": 1.0872,
471
+ "step": 66
472
+ },
473
+ {
474
+ "epoch": 0.29385964912280704,
475
+ "grad_norm": 4.512912750244141,
476
+ "learning_rate": 8.288928906477497e-06,
477
+ "loss": 1.0545,
478
+ "step": 67
479
+ },
480
+ {
481
+ "epoch": 0.2982456140350877,
482
+ "grad_norm": 4.024132251739502,
483
+ "learning_rate": 8.235063034853228e-06,
484
+ "loss": 1.0128,
485
+ "step": 68
486
+ },
487
+ {
488
+ "epoch": 0.3026315789473684,
489
+ "grad_norm": 4.560809135437012,
490
+ "learning_rate": 8.180543444945154e-06,
491
+ "loss": 1.0294,
492
+ "step": 69
493
+ },
494
+ {
495
+ "epoch": 0.30701754385964913,
496
+ "grad_norm": 4.295252799987793,
497
+ "learning_rate": 8.125381153680103e-06,
498
+ "loss": 1.0445,
499
+ "step": 70
500
+ },
501
+ {
502
+ "epoch": 0.31140350877192985,
503
+ "grad_norm": 4.614228248596191,
504
+ "learning_rate": 8.069587307857377e-06,
505
+ "loss": 1.0168,
506
+ "step": 71
507
+ },
508
+ {
509
+ "epoch": 0.3157894736842105,
510
+ "grad_norm": 4.256739139556885,
511
+ "learning_rate": 8.013173181896283e-06,
512
+ "loss": 1.0207,
513
+ "step": 72
514
+ },
515
+ {
516
+ "epoch": 0.3201754385964912,
517
+ "grad_norm": 4.458586692810059,
518
+ "learning_rate": 7.95615017555788e-06,
519
+ "loss": 1.0401,
520
+ "step": 73
521
+ },
522
+ {
523
+ "epoch": 0.32456140350877194,
524
+ "grad_norm": 4.6360392570495605,
525
+ "learning_rate": 7.898529811641393e-06,
526
+ "loss": 1.0347,
527
+ "step": 74
528
+ },
529
+ {
530
+ "epoch": 0.32894736842105265,
531
+ "grad_norm": 4.286371231079102,
532
+ "learning_rate": 7.84032373365578e-06,
533
+ "loss": 1.061,
534
+ "step": 75
535
+ },
536
+ {
537
+ "epoch": 0.3333333333333333,
538
+ "grad_norm": 4.306332588195801,
539
+ "learning_rate": 7.781543703466881e-06,
540
+ "loss": 0.988,
541
+ "step": 76
542
+ },
543
+ {
544
+ "epoch": 0.33771929824561403,
545
+ "grad_norm": 4.2425947189331055,
546
+ "learning_rate": 7.722201598920673e-06,
547
+ "loss": 1.0442,
548
+ "step": 77
549
+ },
550
+ {
551
+ "epoch": 0.34210526315789475,
552
+ "grad_norm": 4.27526330947876,
553
+ "learning_rate": 7.662309411443084e-06,
554
+ "loss": 1.0412,
555
+ "step": 78
556
+ },
557
+ {
558
+ "epoch": 0.34649122807017546,
559
+ "grad_norm": 4.224104881286621,
560
+ "learning_rate": 7.601879243616838e-06,
561
+ "loss": 1.0205,
562
+ "step": 79
563
+ },
564
+ {
565
+ "epoch": 0.3508771929824561,
566
+ "grad_norm": 4.250307559967041,
567
+ "learning_rate": 7.540923306735868e-06,
568
+ "loss": 1.0298,
569
+ "step": 80
570
+ },
571
+ {
572
+ "epoch": 0.35526315789473684,
573
+ "grad_norm": 4.223860263824463,
574
+ "learning_rate": 7.479453918337733e-06,
575
+ "loss": 1.0174,
576
+ "step": 81
577
+ },
578
+ {
579
+ "epoch": 0.35964912280701755,
580
+ "grad_norm": 4.442715644836426,
581
+ "learning_rate": 7.417483499714589e-06,
582
+ "loss": 1.0281,
583
+ "step": 82
584
+ },
585
+ {
586
+ "epoch": 0.36403508771929827,
587
+ "grad_norm": 4.697696685791016,
588
+ "learning_rate": 7.355024573403174e-06,
589
+ "loss": 1.0045,
590
+ "step": 83
591
+ },
592
+ {
593
+ "epoch": 0.3684210526315789,
594
+ "grad_norm": 4.343409061431885,
595
+ "learning_rate": 7.292089760654352e-06,
596
+ "loss": 1.0253,
597
+ "step": 84
598
+ },
599
+ {
600
+ "epoch": 0.37280701754385964,
601
+ "grad_norm": 4.392301082611084,
602
+ "learning_rate": 7.2286917788826926e-06,
603
+ "loss": 1.0245,
604
+ "step": 85
605
+ },
606
+ {
607
+ "epoch": 0.37719298245614036,
608
+ "grad_norm": 5.410698890686035,
609
+ "learning_rate": 7.1648434390966356e-06,
610
+ "loss": 1.0149,
611
+ "step": 86
612
+ },
613
+ {
614
+ "epoch": 0.3815789473684211,
615
+ "grad_norm": 4.763819694519043,
616
+ "learning_rate": 7.100557643309732e-06,
617
+ "loss": 1.0019,
618
+ "step": 87
619
+ },
620
+ {
621
+ "epoch": 0.38596491228070173,
622
+ "grad_norm": 4.71998405456543,
623
+ "learning_rate": 7.035847381933494e-06,
624
+ "loss": 1.0147,
625
+ "step": 88
626
+ },
627
+ {
628
+ "epoch": 0.39035087719298245,
629
+ "grad_norm": 4.432498931884766,
630
+ "learning_rate": 6.970725731152389e-06,
631
+ "loss": 0.9572,
632
+ "step": 89
633
+ },
634
+ {
635
+ "epoch": 0.39473684210526316,
636
+ "grad_norm": 4.130496978759766,
637
+ "learning_rate": 6.905205850281502e-06,
638
+ "loss": 0.9651,
639
+ "step": 90
640
+ },
641
+ {
642
+ "epoch": 0.3991228070175439,
643
+ "grad_norm": 4.6582441329956055,
644
+ "learning_rate": 6.8393009791073895e-06,
645
+ "loss": 1.0471,
646
+ "step": 91
647
+ },
648
+ {
649
+ "epoch": 0.40350877192982454,
650
+ "grad_norm": 4.263586521148682,
651
+ "learning_rate": 6.773024435212678e-06,
652
+ "loss": 1.0062,
653
+ "step": 92
654
+ },
655
+ {
656
+ "epoch": 0.40789473684210525,
657
+ "grad_norm": 4.377040386199951,
658
+ "learning_rate": 6.706389611284953e-06,
659
+ "loss": 1.0101,
660
+ "step": 93
661
+ },
662
+ {
663
+ "epoch": 0.41228070175438597,
664
+ "grad_norm": 4.429991722106934,
665
+ "learning_rate": 6.639409972410446e-06,
666
+ "loss": 1.0208,
667
+ "step": 94
668
+ },
669
+ {
670
+ "epoch": 0.4166666666666667,
671
+ "grad_norm": 4.125415325164795,
672
+ "learning_rate": 6.57209905335312e-06,
673
+ "loss": 1.0014,
674
+ "step": 95
675
+ },
676
+ {
677
+ "epoch": 0.42105263157894735,
678
+ "grad_norm": 4.075379848480225,
679
+ "learning_rate": 6.504470455819651e-06,
680
+ "loss": 0.9726,
681
+ "step": 96
682
+ },
683
+ {
684
+ "epoch": 0.42543859649122806,
685
+ "grad_norm": 4.166284084320068,
686
+ "learning_rate": 6.436537845710904e-06,
687
+ "loss": 1.0462,
688
+ "step": 97
689
+ },
690
+ {
691
+ "epoch": 0.4298245614035088,
692
+ "grad_norm": 3.857902765274048,
693
+ "learning_rate": 6.368314950360416e-06,
694
+ "loss": 0.997,
695
+ "step": 98
696
+ },
697
+ {
698
+ "epoch": 0.4342105263157895,
699
+ "grad_norm": 4.08658504486084,
700
+ "learning_rate": 6.299815555760478e-06,
701
+ "loss": 1.0351,
702
+ "step": 99
703
+ },
704
+ {
705
+ "epoch": 0.43859649122807015,
706
+ "grad_norm": 4.2274651527404785,
707
+ "learning_rate": 6.231053503776363e-06,
708
+ "loss": 1.04,
709
+ "step": 100
710
+ },
711
+ {
712
+ "epoch": 0.44298245614035087,
713
+ "grad_norm": 4.353753089904785,
714
+ "learning_rate": 6.1620426893492645e-06,
715
+ "loss": 0.9802,
716
+ "step": 101
717
+ },
718
+ {
719
+ "epoch": 0.4473684210526316,
720
+ "grad_norm": 4.267554759979248,
721
+ "learning_rate": 6.092797057688496e-06,
722
+ "loss": 1.006,
723
+ "step": 102
724
+ },
725
+ {
726
+ "epoch": 0.4517543859649123,
727
+ "grad_norm": 4.590907573699951,
728
+ "learning_rate": 6.0233306014535505e-06,
729
+ "loss": 0.9833,
730
+ "step": 103
731
+ },
732
+ {
733
+ "epoch": 0.45614035087719296,
734
+ "grad_norm": 4.248331546783447,
735
+ "learning_rate": 5.953657357926569e-06,
736
+ "loss": 1.0409,
737
+ "step": 104
738
+ },
739
+ {
740
+ "epoch": 0.4605263157894737,
741
+ "grad_norm": 3.9719252586364746,
742
+ "learning_rate": 5.883791406175775e-06,
743
+ "loss": 1.0191,
744
+ "step": 105
745
+ },
746
+ {
747
+ "epoch": 0.4649122807017544,
748
+ "grad_norm": 4.340267181396484,
749
+ "learning_rate": 5.813746864210489e-06,
750
+ "loss": 0.9992,
751
+ "step": 106
752
+ },
753
+ {
754
+ "epoch": 0.4692982456140351,
755
+ "grad_norm": 4.42672872543335,
756
+ "learning_rate": 5.743537886128258e-06,
757
+ "loss": 1.0023,
758
+ "step": 107
759
+ },
760
+ {
761
+ "epoch": 0.47368421052631576,
762
+ "grad_norm": 4.119699478149414,
763
+ "learning_rate": 5.673178659254698e-06,
764
+ "loss": 0.9909,
765
+ "step": 108
766
+ },
767
+ {
768
+ "epoch": 0.4780701754385965,
769
+ "grad_norm": 4.342530250549316,
770
+ "learning_rate": 5.6026834012766155e-06,
771
+ "loss": 0.9598,
772
+ "step": 109
773
+ },
774
+ {
775
+ "epoch": 0.4824561403508772,
776
+ "grad_norm": 4.101733684539795,
777
+ "learning_rate": 5.532066357369012e-06,
778
+ "loss": 0.9879,
779
+ "step": 110
780
+ },
781
+ {
782
+ "epoch": 0.4868421052631579,
783
+ "grad_norm": 4.340119361877441,
784
+ "learning_rate": 5.46134179731651e-06,
785
+ "loss": 1.0184,
786
+ "step": 111
787
+ },
788
+ {
789
+ "epoch": 0.49122807017543857,
790
+ "grad_norm": 4.37206506729126,
791
+ "learning_rate": 5.390524012629824e-06,
792
+ "loss": 1.0496,
793
+ "step": 112
794
+ },
795
+ {
796
+ "epoch": 0.4956140350877193,
797
+ "grad_norm": 4.118902683258057,
798
+ "learning_rate": 5.319627313657829e-06,
799
+ "loss": 1.0399,
800
+ "step": 113
801
+ },
802
+ {
803
+ "epoch": 0.5,
804
+ "grad_norm": 4.071263790130615,
805
+ "learning_rate": 5.248666026695835e-06,
806
+ "loss": 0.9906,
807
+ "step": 114
808
+ },
809
+ {
810
+ "epoch": 0.5043859649122807,
811
+ "grad_norm": 4.421329975128174,
812
+ "learning_rate": 5.177654491090627e-06,
813
+ "loss": 0.9836,
814
+ "step": 115
815
+ },
816
+ {
817
+ "epoch": 0.5087719298245614,
818
+ "grad_norm": 4.517954349517822,
819
+ "learning_rate": 5.1066070563428736e-06,
820
+ "loss": 1.0083,
821
+ "step": 116
822
+ },
823
+ {
824
+ "epoch": 0.5131578947368421,
825
+ "grad_norm": 4.462676525115967,
826
+ "learning_rate": 5.035538079207488e-06,
827
+ "loss": 1.0034,
828
+ "step": 117
829
+ },
830
+ {
831
+ "epoch": 0.5175438596491229,
832
+ "grad_norm": 4.097958087921143,
833
+ "learning_rate": 4.964461920792512e-06,
834
+ "loss": 1.0244,
835
+ "step": 118
836
+ },
837
+ {
838
+ "epoch": 0.5219298245614035,
839
+ "grad_norm": 4.527233600616455,
840
+ "learning_rate": 4.893392943657127e-06,
841
+ "loss": 0.9473,
842
+ "step": 119
843
+ },
844
+ {
845
+ "epoch": 0.5263157894736842,
846
+ "grad_norm": 4.666014671325684,
847
+ "learning_rate": 4.822345508909376e-06,
848
+ "loss": 0.93,
849
+ "step": 120
850
+ },
851
+ {
852
+ "epoch": 0.5307017543859649,
853
+ "grad_norm": 4.115242958068848,
854
+ "learning_rate": 4.751333973304166e-06,
855
+ "loss": 1.0262,
856
+ "step": 121
857
+ },
858
+ {
859
+ "epoch": 0.5350877192982456,
860
+ "grad_norm": 4.032143592834473,
861
+ "learning_rate": 4.680372686342173e-06,
862
+ "loss": 1.0236,
863
+ "step": 122
864
+ },
865
+ {
866
+ "epoch": 0.5394736842105263,
867
+ "grad_norm": 4.429379463195801,
868
+ "learning_rate": 4.609475987370177e-06,
869
+ "loss": 0.9891,
870
+ "step": 123
871
+ },
872
+ {
873
+ "epoch": 0.543859649122807,
874
+ "grad_norm": 4.283740043640137,
875
+ "learning_rate": 4.53865820268349e-06,
876
+ "loss": 0.9761,
877
+ "step": 124
878
+ },
879
+ {
880
+ "epoch": 0.5482456140350878,
881
+ "grad_norm": 4.0465545654296875,
882
+ "learning_rate": 4.467933642630989e-06,
883
+ "loss": 0.9847,
884
+ "step": 125
885
+ },
886
+ {
887
+ "epoch": 0.5526315789473685,
888
+ "grad_norm": 4.005406856536865,
889
+ "learning_rate": 4.397316598723385e-06,
890
+ "loss": 0.9916,
891
+ "step": 126
892
+ },
893
+ {
894
+ "epoch": 0.5570175438596491,
895
+ "grad_norm": 4.05433988571167,
896
+ "learning_rate": 4.326821340745304e-06,
897
+ "loss": 0.9535,
898
+ "step": 127
899
+ },
900
+ {
901
+ "epoch": 0.5614035087719298,
902
+ "grad_norm": 4.201546669006348,
903
+ "learning_rate": 4.256462113871741e-06,
904
+ "loss": 0.9249,
905
+ "step": 128
906
+ },
907
+ {
908
+ "epoch": 0.5657894736842105,
909
+ "grad_norm": 4.1509904861450195,
910
+ "learning_rate": 4.186253135789511e-06,
911
+ "loss": 0.9885,
912
+ "step": 129
913
+ },
914
+ {
915
+ "epoch": 0.5701754385964912,
916
+ "grad_norm": 4.355051040649414,
917
+ "learning_rate": 4.116208593824227e-06,
918
+ "loss": 1.0087,
919
+ "step": 130
920
+ },
921
+ {
922
+ "epoch": 0.5745614035087719,
923
+ "grad_norm": 4.163605690002441,
924
+ "learning_rate": 4.046342642073433e-06,
925
+ "loss": 0.9654,
926
+ "step": 131
927
+ },
928
+ {
929
+ "epoch": 0.5789473684210527,
930
+ "grad_norm": 4.231169700622559,
931
+ "learning_rate": 3.976669398546451e-06,
932
+ "loss": 0.991,
933
+ "step": 132
934
+ },
935
+ {
936
+ "epoch": 0.5833333333333334,
937
+ "grad_norm": 4.092093467712402,
938
+ "learning_rate": 3.907202942311506e-06,
939
+ "loss": 0.9702,
940
+ "step": 133
941
+ },
942
+ {
943
+ "epoch": 0.5877192982456141,
944
+ "grad_norm": 4.0646233558654785,
945
+ "learning_rate": 3.837957310650738e-06,
946
+ "loss": 0.9451,
947
+ "step": 134
948
+ },
949
+ {
950
+ "epoch": 0.5921052631578947,
951
+ "grad_norm": 4.397289276123047,
952
+ "learning_rate": 3.7689464962236367e-06,
953
+ "loss": 0.9498,
954
+ "step": 135
955
+ },
956
+ {
957
+ "epoch": 0.5964912280701754,
958
+ "grad_norm": 3.946786403656006,
959
+ "learning_rate": 3.700184444239524e-06,
960
+ "loss": 0.9828,
961
+ "step": 136
962
+ },
963
+ {
964
+ "epoch": 0.6008771929824561,
965
+ "grad_norm": 4.302344799041748,
966
+ "learning_rate": 3.6316850496395863e-06,
967
+ "loss": 0.956,
968
+ "step": 137
969
+ },
970
+ {
971
+ "epoch": 0.6052631578947368,
972
+ "grad_norm": 4.430941581726074,
973
+ "learning_rate": 3.563462154289098e-06,
974
+ "loss": 0.9553,
975
+ "step": 138
976
+ },
977
+ {
978
+ "epoch": 0.6096491228070176,
979
+ "grad_norm": 4.306185722351074,
980
+ "learning_rate": 3.49552954418035e-06,
981
+ "loss": 1.0269,
982
+ "step": 139
983
+ },
984
+ {
985
+ "epoch": 0.6140350877192983,
986
+ "grad_norm": 4.095279216766357,
987
+ "learning_rate": 3.4279009466468825e-06,
988
+ "loss": 0.9888,
989
+ "step": 140
990
+ },
991
+ {
992
+ "epoch": 0.618421052631579,
993
+ "grad_norm": 4.261977195739746,
994
+ "learning_rate": 3.3605900275895565e-06,
995
+ "loss": 1.0003,
996
+ "step": 141
997
+ },
998
+ {
999
+ "epoch": 0.6228070175438597,
1000
+ "grad_norm": 3.9479899406433105,
1001
+ "learning_rate": 3.2936103887150484e-06,
1002
+ "loss": 0.9982,
1003
+ "step": 142
1004
+ },
1005
+ {
1006
+ "epoch": 0.6271929824561403,
1007
+ "grad_norm": 4.058523654937744,
1008
+ "learning_rate": 3.226975564787322e-06,
1009
+ "loss": 0.9645,
1010
+ "step": 143
1011
+ },
1012
+ {
1013
+ "epoch": 0.631578947368421,
1014
+ "grad_norm": 4.375962734222412,
1015
+ "learning_rate": 3.1606990208926125e-06,
1016
+ "loss": 0.9443,
1017
+ "step": 144
1018
+ },
1019
+ {
1020
+ "epoch": 0.6359649122807017,
1021
+ "grad_norm": 4.618311405181885,
1022
+ "learning_rate": 3.0947941497184985e-06,
1023
+ "loss": 1.0069,
1024
+ "step": 145
1025
+ },
1026
+ {
1027
+ "epoch": 0.6403508771929824,
1028
+ "grad_norm": 4.422805309295654,
1029
+ "learning_rate": 3.0292742688476125e-06,
1030
+ "loss": 0.9718,
1031
+ "step": 146
1032
+ },
1033
+ {
1034
+ "epoch": 0.6447368421052632,
1035
+ "grad_norm": 3.93886399269104,
1036
+ "learning_rate": 2.964152618066508e-06,
1037
+ "loss": 0.9981,
1038
+ "step": 147
1039
+ },
1040
+ {
1041
+ "epoch": 0.6491228070175439,
1042
+ "grad_norm": 3.937767744064331,
1043
+ "learning_rate": 2.899442356690271e-06,
1044
+ "loss": 0.994,
1045
+ "step": 148
1046
+ },
1047
+ {
1048
+ "epoch": 0.6535087719298246,
1049
+ "grad_norm": 4.434424877166748,
1050
+ "learning_rate": 2.835156560903365e-06,
1051
+ "loss": 0.9605,
1052
+ "step": 149
1053
+ },
1054
+ {
1055
+ "epoch": 0.6578947368421053,
1056
+ "grad_norm": 4.723587989807129,
1057
+ "learning_rate": 2.771308221117309e-06,
1058
+ "loss": 0.9965,
1059
+ "step": 150
1060
+ },
1061
+ {
1062
+ "epoch": 0.6622807017543859,
1063
+ "grad_norm": 4.1226301193237305,
1064
+ "learning_rate": 2.7079102393456503e-06,
1065
+ "loss": 1.0161,
1066
+ "step": 151
1067
+ },
1068
+ {
1069
+ "epoch": 0.6666666666666666,
1070
+ "grad_norm": 3.8456857204437256,
1071
+ "learning_rate": 2.6449754265968263e-06,
1072
+ "loss": 0.9696,
1073
+ "step": 152
1074
+ },
1075
+ {
1076
+ "epoch": 0.6710526315789473,
1077
+ "grad_norm": 3.9130499362945557,
1078
+ "learning_rate": 2.5825165002854124e-06,
1079
+ "loss": 0.9674,
1080
+ "step": 153
1081
+ },
1082
+ {
1083
+ "epoch": 0.6754385964912281,
1084
+ "grad_norm": 4.2803473472595215,
1085
+ "learning_rate": 2.5205460816622684e-06,
1086
+ "loss": 0.9933,
1087
+ "step": 154
1088
+ },
1089
+ {
1090
+ "epoch": 0.6798245614035088,
1091
+ "grad_norm": 3.8558075428009033,
1092
+ "learning_rate": 2.4590766932641353e-06,
1093
+ "loss": 0.977,
1094
+ "step": 155
1095
+ },
1096
+ {
1097
+ "epoch": 0.6842105263157895,
1098
+ "grad_norm": 4.1506452560424805,
1099
+ "learning_rate": 2.3981207563831633e-06,
1100
+ "loss": 0.9729,
1101
+ "step": 156
1102
+ },
1103
+ {
1104
+ "epoch": 0.6885964912280702,
1105
+ "grad_norm": 4.3258843421936035,
1106
+ "learning_rate": 2.3376905885569185e-06,
1107
+ "loss": 0.914,
1108
+ "step": 157
1109
+ },
1110
+ {
1111
+ "epoch": 0.6929824561403509,
1112
+ "grad_norm": 4.091325759887695,
1113
+ "learning_rate": 2.2777984010793264e-06,
1114
+ "loss": 0.9641,
1115
+ "step": 158
1116
+ },
1117
+ {
1118
+ "epoch": 0.6973684210526315,
1119
+ "grad_norm": 4.333226203918457,
1120
+ "learning_rate": 2.2184562965331203e-06,
1121
+ "loss": 0.9074,
1122
+ "step": 159
1123
+ },
1124
+ {
1125
+ "epoch": 0.7017543859649122,
1126
+ "grad_norm": 3.8289241790771484,
1127
+ "learning_rate": 2.159676266344222e-06,
1128
+ "loss": 0.938,
1129
+ "step": 160
1130
+ },
1131
+ {
1132
+ "epoch": 0.706140350877193,
1133
+ "grad_norm": 4.058959007263184,
1134
+ "learning_rate": 2.1014701883586087e-06,
1135
+ "loss": 0.9387,
1136
+ "step": 161
1137
+ },
1138
+ {
1139
+ "epoch": 0.7105263157894737,
1140
+ "grad_norm": 3.7621591091156006,
1141
+ "learning_rate": 2.043849824442124e-06,
1142
+ "loss": 0.9931,
1143
+ "step": 162
1144
+ },
1145
+ {
1146
+ "epoch": 0.7149122807017544,
1147
+ "grad_norm": 4.214111804962158,
1148
+ "learning_rate": 1.9868268181037186e-06,
1149
+ "loss": 0.9578,
1150
+ "step": 163
1151
+ },
1152
+ {
1153
+ "epoch": 0.7192982456140351,
1154
+ "grad_norm": 4.251298427581787,
1155
+ "learning_rate": 1.9304126921426235e-06,
1156
+ "loss": 0.9615,
1157
+ "step": 164
1158
+ },
1159
+ {
1160
+ "epoch": 0.7236842105263158,
1161
+ "grad_norm": 3.980278730392456,
1162
+ "learning_rate": 1.8746188463198983e-06,
1163
+ "loss": 0.952,
1164
+ "step": 165
1165
+ },
1166
+ {
1167
+ "epoch": 0.7280701754385965,
1168
+ "grad_norm": 4.493079662322998,
1169
+ "learning_rate": 1.8194565550548477e-06,
1170
+ "loss": 0.9422,
1171
+ "step": 166
1172
+ },
1173
+ {
1174
+ "epoch": 0.7324561403508771,
1175
+ "grad_norm": 4.167688846588135,
1176
+ "learning_rate": 1.764936965146773e-06,
1177
+ "loss": 0.9556,
1178
+ "step": 167
1179
+ },
1180
+ {
1181
+ "epoch": 0.7368421052631579,
1182
+ "grad_norm": 3.890090227127075,
1183
+ "learning_rate": 1.7110710935225055e-06,
1184
+ "loss": 0.9609,
1185
+ "step": 168
1186
+ },
1187
+ {
1188
+ "epoch": 0.7412280701754386,
1189
+ "grad_norm": 4.315472602844238,
1190
+ "learning_rate": 1.6578698250101828e-06,
1191
+ "loss": 0.9945,
1192
+ "step": 169
1193
+ },
1194
+ {
1195
+ "epoch": 0.7456140350877193,
1196
+ "grad_norm": 4.024162769317627,
1197
+ "learning_rate": 1.6053439101397257e-06,
1198
+ "loss": 0.9526,
1199
+ "step": 170
1200
+ },
1201
+ {
1202
+ "epoch": 0.75,
1203
+ "grad_norm": 4.042996883392334,
1204
+ "learning_rate": 1.5535039629704467e-06,
1205
+ "loss": 0.953,
1206
+ "step": 171
1207
+ },
1208
+ {
1209
+ "epoch": 0.7543859649122807,
1210
+ "grad_norm": 4.670426845550537,
1211
+ "learning_rate": 1.502360458946232e-06,
1212
+ "loss": 0.9418,
1213
+ "step": 172
1214
+ },
1215
+ {
1216
+ "epoch": 0.7587719298245614,
1217
+ "grad_norm": 3.7296359539031982,
1218
+ "learning_rate": 1.451923732778745e-06,
1219
+ "loss": 0.9842,
1220
+ "step": 173
1221
+ },
1222
+ {
1223
+ "epoch": 0.7631578947368421,
1224
+ "grad_norm": 4.069624423980713,
1225
+ "learning_rate": 1.4022039763590595e-06,
1226
+ "loss": 0.9483,
1227
+ "step": 174
1228
+ },
1229
+ {
1230
+ "epoch": 0.7675438596491229,
1231
+ "grad_norm": 3.8240842819213867,
1232
+ "learning_rate": 1.3532112366981598e-06,
1233
+ "loss": 1.0225,
1234
+ "step": 175
1235
+ },
1236
+ {
1237
+ "epoch": 0.7719298245614035,
1238
+ "grad_norm": 4.527555465698242,
1239
+ "learning_rate": 1.3049554138967052e-06,
1240
+ "loss": 0.9787,
1241
+ "step": 176
1242
+ },
1243
+ {
1244
+ "epoch": 0.7763157894736842,
1245
+ "grad_norm": 3.748631238937378,
1246
+ "learning_rate": 1.257446259144494e-06,
1247
+ "loss": 0.9812,
1248
+ "step": 177
1249
+ },
1250
+ {
1251
+ "epoch": 0.7807017543859649,
1252
+ "grad_norm": 3.8050689697265625,
1253
+ "learning_rate": 1.210693372750017e-06,
1254
+ "loss": 0.9686,
1255
+ "step": 178
1256
+ },
1257
+ {
1258
+ "epoch": 0.7850877192982456,
1259
+ "grad_norm": 3.9663491249084473,
1260
+ "learning_rate": 1.1647062022004845e-06,
1261
+ "loss": 0.9909,
1262
+ "step": 179
1263
+ },
1264
+ {
1265
+ "epoch": 0.7894736842105263,
1266
+ "grad_norm": 3.8755075931549072,
1267
+ "learning_rate": 1.1194940402527566e-06,
1268
+ "loss": 0.9682,
1269
+ "step": 180
1270
+ },
1271
+ {
1272
+ "epoch": 0.793859649122807,
1273
+ "grad_norm": 3.51550030708313,
1274
+ "learning_rate": 1.075066023055527e-06,
1275
+ "loss": 0.9218,
1276
+ "step": 181
1277
+ },
1278
+ {
1279
+ "epoch": 0.7982456140350878,
1280
+ "grad_norm": 3.9786829948425293,
1281
+ "learning_rate": 1.0314311283031531e-06,
1282
+ "loss": 0.9753,
1283
+ "step": 182
1284
+ },
1285
+ {
1286
+ "epoch": 0.8026315789473685,
1287
+ "grad_norm": 4.108859539031982,
1288
+ "learning_rate": 9.885981734215094e-07,
1289
+ "loss": 0.9746,
1290
+ "step": 183
1291
+ },
1292
+ {
1293
+ "epoch": 0.8070175438596491,
1294
+ "grad_norm": 4.208093166351318,
1295
+ "learning_rate": 9.465758137862264e-07,
1296
+ "loss": 0.9479,
1297
+ "step": 184
1298
+ },
1299
+ {
1300
+ "epoch": 0.8114035087719298,
1301
+ "grad_norm": 4.333961009979248,
1302
+ "learning_rate": 9.053725409736752e-07,
1303
+ "loss": 0.9734,
1304
+ "step": 185
1305
+ },
1306
+ {
1307
+ "epoch": 0.8157894736842105,
1308
+ "grad_norm": 3.7324233055114746,
1309
+ "learning_rate": 8.649966810450472e-07,
1310
+ "loss": 0.9145,
1311
+ "step": 186
1312
+ },
1313
+ {
1314
+ "epoch": 0.8201754385964912,
1315
+ "grad_norm": 3.5640878677368164,
1316
+ "learning_rate": 8.254563928638892e-07,
1317
+ "loss": 0.9302,
1318
+ "step": 187
1319
+ },
1320
+ {
1321
+ "epoch": 0.8245614035087719,
1322
+ "grad_norm": 4.192229270935059,
1323
+ "learning_rate": 7.86759666447412e-07,
1324
+ "loss": 0.9509,
1325
+ "step": 188
1326
+ },
1327
+ {
1328
+ "epoch": 0.8289473684210527,
1329
+ "grad_norm": 4.028424263000488,
1330
+ "learning_rate": 7.489143213519301e-07,
1331
+ "loss": 0.9081,
1332
+ "step": 189
1333
+ },
1334
+ {
1335
+ "epoch": 0.8333333333333334,
1336
+ "grad_norm": 3.9033594131469727,
1337
+ "learning_rate": 7.119280050927407e-07,
1338
+ "loss": 0.9404,
1339
+ "step": 190
1340
+ },
1341
+ {
1342
+ "epoch": 0.8377192982456141,
1343
+ "grad_norm": 4.0438361167907715,
1344
+ "learning_rate": 6.758081915987669e-07,
1345
+ "loss": 0.9482,
1346
+ "step": 191
1347
+ },
1348
+ {
1349
+ "epoch": 0.8421052631578947,
1350
+ "grad_norm": 4.058148384094238,
1351
+ "learning_rate": 6.405621797022848e-07,
1352
+ "loss": 0.9161,
1353
+ "step": 192
1354
+ },
1355
+ {
1356
+ "epoch": 0.8464912280701754,
1357
+ "grad_norm": 4.070837020874023,
1358
+ "learning_rate": 6.061970916640236e-07,
1359
+ "loss": 0.9351,
1360
+ "step": 193
1361
+ },
1362
+ {
1363
+ "epoch": 0.8508771929824561,
1364
+ "grad_norm": 3.934864044189453,
1365
+ "learning_rate": 5.727198717339511e-07,
1366
+ "loss": 0.9633,
1367
+ "step": 194
1368
+ },
1369
+ {
1370
+ "epoch": 0.8552631578947368,
1371
+ "grad_norm": 3.696377992630005,
1372
+ "learning_rate": 5.401372847480285e-07,
1373
+ "loss": 0.9923,
1374
+ "step": 195
1375
+ },
1376
+ {
1377
+ "epoch": 0.8596491228070176,
1378
+ "grad_norm": 4.334061622619629,
1379
+ "learning_rate": 5.084559147612244e-07,
1380
+ "loss": 0.9049,
1381
+ "step": 196
1382
+ },
1383
+ {
1384
+ "epoch": 0.8640350877192983,
1385
+ "grad_norm": 3.781756639480591,
1386
+ "learning_rate": 4.776821637170525e-07,
1387
+ "loss": 0.9344,
1388
+ "step": 197
1389
+ },
1390
+ {
1391
+ "epoch": 0.868421052631579,
1392
+ "grad_norm": 3.749248504638672,
1393
+ "learning_rate": 4.4782225015391754e-07,
1394
+ "loss": 0.9594,
1395
+ "step": 198
1396
+ },
1397
+ {
1398
+ "epoch": 0.8728070175438597,
1399
+ "grad_norm": 3.8009796142578125,
1400
+ "learning_rate": 4.1888220794851386e-07,
1401
+ "loss": 0.9519,
1402
+ "step": 199
1403
+ },
1404
+ {
1405
+ "epoch": 0.8771929824561403,
1406
+ "grad_norm": 3.914064645767212,
1407
+ "learning_rate": 3.908678850965425e-07,
1408
+ "loss": 0.9539,
1409
+ "step": 200
1410
+ },
1411
+ {
1412
+ "epoch": 0.881578947368421,
1413
+ "grad_norm": 3.75138521194458,
1414
+ "learning_rate": 3.6378494253099307e-07,
1415
+ "loss": 0.9506,
1416
+ "step": 201
1417
+ },
1418
+ {
1419
+ "epoch": 0.8859649122807017,
1420
+ "grad_norm": 3.7791149616241455,
1421
+ "learning_rate": 3.3763885297822153e-07,
1422
+ "loss": 0.9309,
1423
+ "step": 202
1424
+ },
1425
+ {
1426
+ "epoch": 0.8903508771929824,
1427
+ "grad_norm": 3.780644655227661,
1428
+ "learning_rate": 3.1243489985206097e-07,
1429
+ "loss": 0.9497,
1430
+ "step": 203
1431
+ },
1432
+ {
1433
+ "epoch": 0.8947368421052632,
1434
+ "grad_norm": 4.0235676765441895,
1435
+ "learning_rate": 2.8817817618618846e-07,
1436
+ "loss": 0.9707,
1437
+ "step": 204
1438
+ },
1439
+ {
1440
+ "epoch": 0.8991228070175439,
1441
+ "grad_norm": 3.802351951599121,
1442
+ "learning_rate": 2.648735836049615e-07,
1443
+ "loss": 0.953,
1444
+ "step": 205
1445
+ },
1446
+ {
1447
+ "epoch": 0.9035087719298246,
1448
+ "grad_norm": 3.995856523513794,
1449
+ "learning_rate": 2.4252583133292927e-07,
1450
+ "loss": 0.9465,
1451
+ "step": 206
1452
+ },
1453
+ {
1454
+ "epoch": 0.9078947368421053,
1455
+ "grad_norm": 4.330905914306641,
1456
+ "learning_rate": 2.2113943524323167e-07,
1457
+ "loss": 0.9527,
1458
+ "step": 207
1459
+ },
1460
+ {
1461
+ "epoch": 0.9122807017543859,
1462
+ "grad_norm": 4.164973258972168,
1463
+ "learning_rate": 2.007187169450603e-07,
1464
+ "loss": 0.9498,
1465
+ "step": 208
1466
+ },
1467
+ {
1468
+ "epoch": 0.9166666666666666,
1469
+ "grad_norm": 3.576552391052246,
1470
+ "learning_rate": 1.8126780291038037e-07,
1471
+ "loss": 0.9582,
1472
+ "step": 209
1473
+ },
1474
+ {
1475
+ "epoch": 0.9210526315789473,
1476
+ "grad_norm": 3.625762939453125,
1477
+ "learning_rate": 1.6279062364008446e-07,
1478
+ "loss": 0.9169,
1479
+ "step": 210
1480
+ },
1481
+ {
1482
+ "epoch": 0.9254385964912281,
1483
+ "grad_norm": 3.5316288471221924,
1484
+ "learning_rate": 1.4529091286973994e-07,
1485
+ "loss": 0.9789,
1486
+ "step": 211
1487
+ },
1488
+ {
1489
+ "epoch": 0.9298245614035088,
1490
+ "grad_norm": 3.8600916862487793,
1491
+ "learning_rate": 1.2877220681510927e-07,
1492
+ "loss": 0.9705,
1493
+ "step": 212
1494
+ },
1495
+ {
1496
+ "epoch": 0.9342105263157895,
1497
+ "grad_norm": 3.9807686805725098,
1498
+ "learning_rate": 1.1323784345757205e-07,
1499
+ "loss": 0.9249,
1500
+ "step": 213
1501
+ },
1502
+ {
1503
+ "epoch": 0.9385964912280702,
1504
+ "grad_norm": 3.589660406112671,
1505
+ "learning_rate": 9.869096186961025e-08,
1506
+ "loss": 0.9967,
1507
+ "step": 214
1508
+ },
1509
+ {
1510
+ "epoch": 0.9429824561403509,
1511
+ "grad_norm": 3.8679358959198,
1512
+ "learning_rate": 8.513450158049109e-08,
1513
+ "loss": 0.9782,
1514
+ "step": 215
1515
+ },
1516
+ {
1517
+ "epoch": 0.9473684210526315,
1518
+ "grad_norm": 4.188008785247803,
1519
+ "learning_rate": 7.257120198226219e-08,
1520
+ "loss": 0.9853,
1521
+ "step": 216
1522
+ },
1523
+ {
1524
+ "epoch": 0.9517543859649122,
1525
+ "grad_norm": 4.046226978302002,
1526
+ "learning_rate": 6.100360177619946e-08,
1527
+ "loss": 0.8929,
1528
+ "step": 217
1529
+ },
1530
+ {
1531
+ "epoch": 0.956140350877193,
1532
+ "grad_norm": 7.8870110511779785,
1533
+ "learning_rate": 5.0434038459801213e-08,
1534
+ "loss": 0.9297,
1535
+ "step": 218
1536
+ },
1537
+ {
1538
+ "epoch": 0.9605263157894737,
1539
+ "grad_norm": 3.9931716918945312,
1540
+ "learning_rate": 4.086464785444777e-08,
1541
+ "loss": 0.9525,
1542
+ "step": 219
1543
+ },
1544
+ {
1545
+ "epoch": 0.9649122807017544,
1546
+ "grad_norm": 3.876042604446411,
1547
+ "learning_rate": 3.229736367380498e-08,
1548
+ "loss": 0.9839,
1549
+ "step": 220
1550
+ },
1551
+ {
1552
+ "epoch": 0.9692982456140351,
1553
+ "grad_norm": 4.057157516479492,
1554
+ "learning_rate": 2.4733917133077378e-08,
1555
+ "loss": 0.9349,
1556
+ "step": 221
1557
+ },
1558
+ {
1559
+ "epoch": 0.9736842105263158,
1560
+ "grad_norm": 3.735743522644043,
1561
+ "learning_rate": 1.8175836599173545e-08,
1562
+ "loss": 0.9716,
1563
+ "step": 222
1564
+ },
1565
+ {
1566
+ "epoch": 0.9780701754385965,
1567
+ "grad_norm": 3.7397193908691406,
1568
+ "learning_rate": 1.2624447281867625e-08,
1569
+ "loss": 0.9661,
1570
+ "step": 223
1571
+ },
1572
+ {
1573
+ "epoch": 0.9824561403508771,
1574
+ "grad_norm": 3.6132941246032715,
1575
+ "learning_rate": 8.080870966008513e-09,
1576
+ "loss": 0.9614,
1577
+ "step": 224
1578
+ },
1579
+ {
1580
+ "epoch": 0.9868421052631579,
1581
+ "grad_norm": 4.1698784828186035,
1582
+ "learning_rate": 4.546025784837316e-09,
1583
+ "loss": 0.9655,
1584
+ "step": 225
1585
+ },
1586
+ {
1587
+ "epoch": 0.9912280701754386,
1588
+ "grad_norm": 3.830890655517578,
1589
+ "learning_rate": 2.0206260344590724e-09,
1590
+ "loss": 0.9867,
1591
+ "step": 226
1592
+ },
1593
+ {
1594
+ "epoch": 0.9956140350877193,
1595
+ "grad_norm": 3.9525303840637207,
1596
+ "learning_rate": 5.051820295032262e-10,
1597
+ "loss": 0.9372,
1598
+ "step": 227
1599
+ },
1600
+ {
1601
+ "epoch": 1.0,
1602
+ "grad_norm": 4.012087345123291,
1603
+ "learning_rate": 0.0,
1604
+ "loss": 0.9237,
1605
+ "step": 228
1606
+ },
1607
+ {
1608
+ "epoch": 1.0,
1609
+ "step": 228,
1610
+ "total_flos": 1.387246770039292e+18,
1611
+ "train_loss": 1.0000714727661066,
1612
+ "train_runtime": 2851.6353,
1613
+ "train_samples_per_second": 20.461,
1614
+ "train_steps_per_second": 0.08
1615
+ }
1616
+ ],
1617
+ "logging_steps": 1.0,
1618
+ "max_steps": 228,
1619
+ "num_input_tokens_seen": 0,
1620
+ "num_train_epochs": 1,
1621
+ "save_steps": 500,
1622
+ "stateful_callbacks": {
1623
+ "TrainerControl": {
1624
+ "args": {
1625
+ "should_epoch_stop": false,
1626
+ "should_evaluate": false,
1627
+ "should_log": false,
1628
+ "should_save": true,
1629
+ "should_training_stop": true
1630
+ },
1631
+ "attributes": {}
1632
+ }
1633
+ },
1634
+ "total_flos": 1.387246770039292e+18,
1635
+ "train_batch_size": 4,
1636
+ "trial_name": null,
1637
+ "trial_params": null
1638
+ }
training_loss.png ADDED