Commit d500ad6 (verified) · sedrickkeh committed · 1 parent: bb55b8f

Training in progress, epoch 0

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED
@@ -0,0 +1,24 @@
+ {
+ "</tool_call>": 151658,
+ "<tool_call>": 151657,
+ "<|box_end|>": 151649,
+ "<|box_start|>": 151648,
+ "<|endoftext|>": 151643,
+ "<|file_sep|>": 151664,
+ "<|fim_middle|>": 151660,
+ "<|fim_pad|>": 151662,
+ "<|fim_prefix|>": 151659,
+ "<|fim_suffix|>": 151661,
+ "<|im_end|>": 151645,
+ "<|im_start|>": 151644,
+ "<|image_pad|>": 151655,
+ "<|object_ref_end|>": 151647,
+ "<|object_ref_start|>": 151646,
+ "<|quad_end|>": 151651,
+ "<|quad_start|>": 151650,
+ "<|repo_name|>": 151663,
+ "<|video_pad|>": 151656,
+ "<|vision_end|>": 151653,
+ "<|vision_pad|>": 151654,
+ "<|vision_start|>": 151652
+ }
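
Editorial note: added_tokens.json pins each special token to a fixed ID in the vocabulary. A minimal sketch of how these IDs could be spot-checked, assuming the `transformers` package and access to the base model named in config.json below:

```python
# Editorial sketch (not part of the commit): verify a few IDs from added_tokens.json.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("Qwen/Qwen2.5-7B-Instruct")

expected = {"<|endoftext|>": 151643, "<|im_start|>": 151644, "<|im_end|>": 151645}
for token, token_id in expected.items():
    assert tok.convert_tokens_to_ids(token) == token_id
```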
config.json ADDED
@@ -0,0 +1,29 @@
+ {
+ "_name_or_path": "Qwen/Qwen2.5-7B-Instruct",
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 151643,
+ "eos_token_id": 151645,
+ "hidden_act": "silu",
+ "hidden_size": 3584,
+ "initializer_range": 0.02,
+ "intermediate_size": 18944,
+ "max_position_embeddings": 32768,
+ "max_window_layers": 28,
+ "model_type": "qwen2",
+ "num_attention_heads": 28,
+ "num_hidden_layers": 28,
+ "num_key_value_heads": 4,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 1000000.0,
+ "sliding_window": null,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.46.1",
+ "use_cache": false,
+ "use_sliding_window": false,
+ "vocab_size": 152064
+ }
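
Editorial note: this config describes a 28-layer Qwen2 decoder with grouped-query attention (28 query heads sharing 4 key/value heads) and a 3584-wide hidden state. A small illustrative sketch, assuming the `transformers` package, of reading these hyperparameters back and deriving two quantities they imply:

```python
# Editorial sketch: derive head dimension and GQA group size from config.json.
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained("Qwen/Qwen2.5-7B-Instruct")  # same fields as above
head_dim = cfg.hidden_size // cfg.num_attention_heads            # 3584 // 28 = 128
gqa_groups = cfg.num_attention_heads // cfg.num_key_value_heads  # 28 // 4 = 7
print(cfg.model_type, head_dim, gqa_groups)
```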
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dfcfd0ab6a9e6aa6aa21cbe4e93e8e27f8489442f55250928b6c9ae31996e3d8
+ size 4877660776
model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9a9825f4382035346759606f2999a0452f4084f0c12c65ebfb3aeef8eb13d564
+ size 4932751008
model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4480bf5102ca4aa3789e8eba82d38b4c231f185c61c9433396fb043d8d155ed1
+ size 4330865200
model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6e6affa0843608867eff2ca382a9c5d7536dd3aa38e9a51355787b0b20a9255c
+ size 1089994880
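
Editorial note: each *.safetensors entry above is a Git LFS pointer, not the weights themselves; it records only the spec version, the object's SHA-256 (`oid`), and its byte size. A hedged sketch of how a downloaded shard could be checked against its pointer (file name and hash taken from the last pointer above; the shard is assumed to have been fetched locally):

```python
# Editorial sketch: verify a fetched shard against the SHA-256 in its LFS pointer.
import hashlib

def sha256_of(path, chunk_size=1 << 20):
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(chunk_size), b""):
            h.update(block)
    return h.hexdigest()

expected = "6e6affa0843608867eff2ca382a9c5d7536dd3aa38e9a51355787b0b20a9255c"
assert sha256_of("model-00004-of-00004.safetensors") == expected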
model.safetensors.index.json ADDED
@@ -0,0 +1,346 @@
1
+ {
2
+ "metadata": {
3
+ "total_size": 15231233024
4
+ },
5
+ "weight_map": {
6
+ "lm_head.weight": "model-00004-of-00004.safetensors",
7
+ "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
8
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
9
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
10
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
11
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
12
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
13
+ "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
14
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
15
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
16
+ "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
17
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
18
+ "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
19
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
20
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
21
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
22
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
23
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
24
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
25
+ "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
26
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
27
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
28
+ "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
29
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
30
+ "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
31
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
32
+ "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
33
+ "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
34
+ "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
35
+ "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
36
+ "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
37
+ "model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
38
+ "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
39
+ "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
40
+ "model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
41
+ "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
42
+ "model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
43
+ "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
44
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
45
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
46
+ "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
47
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
48
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
49
+ "model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
50
+ "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
51
+ "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
52
+ "model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
53
+ "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
54
+ "model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
55
+ "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
56
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
57
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
58
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
59
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
60
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
61
+ "model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
62
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
63
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
64
+ "model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
65
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
66
+ "model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
67
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
68
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
69
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
70
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
71
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
72
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
73
+ "model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
74
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
75
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
76
+ "model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
77
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
78
+ "model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
79
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
80
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
81
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
82
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
83
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
84
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
85
+ "model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
86
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
87
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
88
+ "model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
89
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
90
+ "model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
91
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
92
+ "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
93
+ "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
94
+ "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
95
+ "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
96
+ "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
97
+ "model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
98
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
99
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
100
+ "model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
101
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
102
+ "model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
103
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
104
+ "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
105
+ "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
106
+ "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
107
+ "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
108
+ "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
109
+ "model.layers.16.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
110
+ "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
111
+ "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
112
+ "model.layers.16.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
113
+ "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
114
+ "model.layers.16.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
115
+ "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
116
+ "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
117
+ "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
118
+ "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
119
+ "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
120
+ "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
121
+ "model.layers.17.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
122
+ "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
123
+ "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
124
+ "model.layers.17.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
125
+ "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
126
+ "model.layers.17.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
127
+ "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
128
+ "model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
129
+ "model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
130
+ "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
131
+ "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
132
+ "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
133
+ "model.layers.18.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
134
+ "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
135
+ "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
136
+ "model.layers.18.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
137
+ "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
138
+ "model.layers.18.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
139
+ "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
140
+ "model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
141
+ "model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
142
+ "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
143
+ "model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
144
+ "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
145
+ "model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
146
+ "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
147
+ "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
148
+ "model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
149
+ "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
150
+ "model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
151
+ "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
152
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
153
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
154
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
155
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
156
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
157
+ "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
158
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
159
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
160
+ "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
161
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
162
+ "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
163
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
164
+ "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
165
+ "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
166
+ "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
167
+ "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
168
+ "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
169
+ "model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
170
+ "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
171
+ "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
172
+ "model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
173
+ "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
174
+ "model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
175
+ "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
176
+ "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
177
+ "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
178
+ "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
179
+ "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
180
+ "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
181
+ "model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
182
+ "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
183
+ "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
184
+ "model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
185
+ "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
186
+ "model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
187
+ "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
188
+ "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
189
+ "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
190
+ "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
191
+ "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
192
+ "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
193
+ "model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
194
+ "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
195
+ "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
196
+ "model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
197
+ "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
198
+ "model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
199
+ "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
200
+ "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
201
+ "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
202
+ "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
203
+ "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
204
+ "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
205
+ "model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
206
+ "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
207
+ "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
208
+ "model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
209
+ "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
210
+ "model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
211
+ "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
212
+ "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
213
+ "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
214
+ "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
215
+ "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
216
+ "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
217
+ "model.layers.24.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
218
+ "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
219
+ "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
220
+ "model.layers.24.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
221
+ "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
222
+ "model.layers.24.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
223
+ "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
224
+ "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
225
+ "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
226
+ "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
227
+ "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
228
+ "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
229
+ "model.layers.25.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
230
+ "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
231
+ "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
232
+ "model.layers.25.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
233
+ "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
234
+ "model.layers.25.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
235
+ "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
236
+ "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
237
+ "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
238
+ "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
239
+ "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
240
+ "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
241
+ "model.layers.26.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
242
+ "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
243
+ "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
244
+ "model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
245
+ "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
246
+ "model.layers.26.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
247
+ "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
248
+ "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
249
+ "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
250
+ "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
251
+ "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
252
+ "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
253
+ "model.layers.27.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
254
+ "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
255
+ "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
256
+ "model.layers.27.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
257
+ "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
258
+ "model.layers.27.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
259
+ "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
260
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
261
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
262
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
263
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
264
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
265
+ "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
266
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
267
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
268
+ "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
269
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
270
+ "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
271
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
272
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
273
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
274
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
275
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
276
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
277
+ "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
278
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
279
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
280
+ "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
281
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
282
+ "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
283
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
284
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
285
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
286
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
287
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
288
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
289
+ "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
290
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
291
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
292
+ "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
293
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
294
+ "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
295
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
296
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
297
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
298
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
299
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
300
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
301
+ "model.layers.6.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
302
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
303
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
304
+ "model.layers.6.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
305
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
306
+ "model.layers.6.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
307
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
308
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
309
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
310
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
311
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
312
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
313
+ "model.layers.7.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
314
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
315
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
316
+ "model.layers.7.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
317
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
318
+ "model.layers.7.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
319
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
320
+ "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
321
+ "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
322
+ "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
323
+ "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
324
+ "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
325
+ "model.layers.8.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
326
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
327
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
328
+ "model.layers.8.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
329
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
330
+ "model.layers.8.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
331
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
332
+ "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
333
+ "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
334
+ "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
335
+ "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
336
+ "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
337
+ "model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
338
+ "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
339
+ "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
340
+ "model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
341
+ "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
342
+ "model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
343
+ "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
344
+ "model.norm.weight": "model-00003-of-00004.safetensors"
345
+ }
346
+ }
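
Editorial note: model.safetensors.index.json maps every parameter name to the shard that stores it, and the metadata's total_size of 15,231,233,024 bytes is consistent with roughly 7.6B parameters in bfloat16 (2 bytes each). A minimal sketch, assuming the `safetensors` and `torch` packages plus locally available shards, of using the index to read a single tensor:

```python
# Editorial sketch: locate and read one tensor via weight_map without loading everything.
import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.0.self_attn.q_proj.weight"
shard = index["weight_map"][name]  # "model-00001-of-00004.safetensors" per the map above
with safe_open(shard, framework="pt") as f:
    tensor = f.get_tensor(name)
print(name, tuple(tensor.shape), tensor.dtype)
```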
special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "eos_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
+ size 11421896
tokenizer_config.json ADDED
@@ -0,0 +1,208 @@
+ {
+ "add_bos_token": false,
+ "add_prefix_space": false,
+ "added_tokens_decoder": {
+ "151643": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151644": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151645": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151646": {
+ "content": "<|object_ref_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151647": {
+ "content": "<|object_ref_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151648": {
+ "content": "<|box_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151649": {
+ "content": "<|box_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151650": {
+ "content": "<|quad_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151651": {
+ "content": "<|quad_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151652": {
+ "content": "<|vision_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151653": {
+ "content": "<|vision_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151654": {
+ "content": "<|vision_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151655": {
+ "content": "<|image_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151656": {
+ "content": "<|video_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151657": {
+ "content": "<tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151658": {
+ "content": "</tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151659": {
+ "content": "<|fim_prefix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151660": {
+ "content": "<|fim_middle|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151661": {
+ "content": "<|fim_suffix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151662": {
+ "content": "<|fim_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151663": {
+ "content": "<|repo_name|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151664": {
+ "content": "<|file_sep|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ }
+ },
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "bos_token": null,
+ "chat_template": "{%- if tools %}\n {{- '<|im_start|>system\\n' }}\n {%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n {%- else %}\n {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}\n {%- endif %}\n {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n {%- if messages[0]['role'] == 'system' %}\n {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n {%- else %}\n {{- '<|im_start|>system\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\n' }}\n {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role }}\n {%- if message.content %}\n {{- '\\n' + message.content }}\n {%- endif %}\n {%- for tool_call in message.tool_calls %}\n {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '\\n<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n {{- '<|im_start|>user' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- message.content }}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>\\n' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|endoftext|>",
+ "errors": "replace",
+ "model_max_length": 131072,
+ "pad_token": "<|endoftext|>",
+ "padding_side": "right",
+ "split_special_tokens": false,
+ "tokenizer_class": "Qwen2Tokenizer",
+ "unk_token": null
+ }
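
Editorial note: the chat_template field above is a Jinja2 template that transformers renders into the <|im_start|>/<|im_end|> (ChatML-style) prompt format, including the tool-calling branches. A hedged usage sketch, assuming the tokenizer files from this commit are saved in a local directory (the path is illustrative):

```python
# Editorial sketch: render a conversation with the chat template shipped above.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint")  # directory holding these tokenizer files
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)  # ends with "<|im_start|>assistant\n", ready for generation
```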
trainer_log.jsonl ADDED
@@ -0,0 +1,321 @@
1
+ {"current_steps": 1, "total_steps": 1585, "loss": 1.0552, "lr": 2.5157232704402517e-07, "epoch": 0.003147128245476003, "percentage": 0.06, "elapsed_time": "0:00:50", "remaining_time": "22:18:37"}
2
+ {"current_steps": 2, "total_steps": 1585, "loss": 1.0526, "lr": 5.031446540880503e-07, "epoch": 0.006294256490952006, "percentage": 0.13, "elapsed_time": "0:01:38", "remaining_time": "21:36:38"}
3
+ {"current_steps": 3, "total_steps": 1585, "loss": 1.0558, "lr": 7.547169811320755e-07, "epoch": 0.00944138473642801, "percentage": 0.19, "elapsed_time": "0:02:23", "remaining_time": "21:03:23"}
4
+ {"current_steps": 4, "total_steps": 1585, "loss": 1.0581, "lr": 1.0062893081761007e-06, "epoch": 0.012588512981904013, "percentage": 0.25, "elapsed_time": "0:03:08", "remaining_time": "20:40:10"}
5
+ {"current_steps": 5, "total_steps": 1585, "loss": 1.0691, "lr": 1.257861635220126e-06, "epoch": 0.015735641227380016, "percentage": 0.32, "elapsed_time": "0:03:52", "remaining_time": "20:25:15"}
6
+ {"current_steps": 6, "total_steps": 1585, "loss": 1.0442, "lr": 1.509433962264151e-06, "epoch": 0.01888276947285602, "percentage": 0.38, "elapsed_time": "0:04:40", "remaining_time": "20:31:44"}
7
+ {"current_steps": 7, "total_steps": 1585, "loss": 1.0331, "lr": 1.7610062893081762e-06, "epoch": 0.022029897718332022, "percentage": 0.44, "elapsed_time": "0:05:26", "remaining_time": "20:26:30"}
8
+ {"current_steps": 8, "total_steps": 1585, "loss": 0.9988, "lr": 2.0125786163522013e-06, "epoch": 0.025177025963808025, "percentage": 0.5, "elapsed_time": "0:06:12", "remaining_time": "20:22:19"}
9
+ {"current_steps": 9, "total_steps": 1585, "loss": 0.9861, "lr": 2.2641509433962266e-06, "epoch": 0.02832415420928403, "percentage": 0.57, "elapsed_time": "0:06:57", "remaining_time": "20:17:49"}
10
+ {"current_steps": 10, "total_steps": 1585, "loss": 0.9606, "lr": 2.515723270440252e-06, "epoch": 0.03147128245476003, "percentage": 0.63, "elapsed_time": "0:07:44", "remaining_time": "20:18:15"}
11
+ {"current_steps": 11, "total_steps": 1585, "loss": 0.9645, "lr": 2.767295597484277e-06, "epoch": 0.03461841070023604, "percentage": 0.69, "elapsed_time": "0:08:31", "remaining_time": "20:19:05"}
12
+ {"current_steps": 12, "total_steps": 1585, "loss": 0.9743, "lr": 3.018867924528302e-06, "epoch": 0.03776553894571204, "percentage": 0.76, "elapsed_time": "0:09:15", "remaining_time": "20:13:00"}
13
+ {"current_steps": 13, "total_steps": 1585, "loss": 0.9278, "lr": 3.270440251572327e-06, "epoch": 0.040912667191188044, "percentage": 0.82, "elapsed_time": "0:10:02", "remaining_time": "20:13:43"}
14
+ {"current_steps": 14, "total_steps": 1585, "loss": 0.9396, "lr": 3.5220125786163524e-06, "epoch": 0.044059795436664044, "percentage": 0.88, "elapsed_time": "0:10:46", "remaining_time": "20:08:35"}
15
+ {"current_steps": 15, "total_steps": 1585, "loss": 0.9377, "lr": 3.7735849056603777e-06, "epoch": 0.04720692368214005, "percentage": 0.95, "elapsed_time": "0:11:30", "remaining_time": "20:04:46"}
16
+ {"current_steps": 16, "total_steps": 1585, "loss": 0.9249, "lr": 4.025157232704403e-06, "epoch": 0.05035405192761605, "percentage": 1.01, "elapsed_time": "0:12:17", "remaining_time": "20:05:49"}
17
+ {"current_steps": 17, "total_steps": 1585, "loss": 0.9276, "lr": 4.276729559748428e-06, "epoch": 0.05350118017309206, "percentage": 1.07, "elapsed_time": "0:13:04", "remaining_time": "20:06:02"}
18
+ {"current_steps": 18, "total_steps": 1585, "loss": 0.8794, "lr": 4.528301886792453e-06, "epoch": 0.05664830841856806, "percentage": 1.14, "elapsed_time": "0:13:51", "remaining_time": "20:06:26"}
19
+ {"current_steps": 19, "total_steps": 1585, "loss": 0.8753, "lr": 4.779874213836478e-06, "epoch": 0.05979543666404406, "percentage": 1.2, "elapsed_time": "0:14:36", "remaining_time": "20:03:45"}
20
+ {"current_steps": 20, "total_steps": 1585, "loss": 0.8624, "lr": 5.031446540880504e-06, "epoch": 0.06294256490952006, "percentage": 1.26, "elapsed_time": "0:15:14", "remaining_time": "19:52:11"}
21
+ {"current_steps": 21, "total_steps": 1585, "loss": 0.8442, "lr": 5.283018867924529e-06, "epoch": 0.06608969315499606, "percentage": 1.32, "elapsed_time": "0:15:58", "remaining_time": "19:50:02"}
22
+ {"current_steps": 22, "total_steps": 1585, "loss": 0.8329, "lr": 5.534591194968554e-06, "epoch": 0.06923682140047208, "percentage": 1.39, "elapsed_time": "0:16:41", "remaining_time": "19:46:11"}
23
+ {"current_steps": 23, "total_steps": 1585, "loss": 0.8175, "lr": 5.786163522012579e-06, "epoch": 0.07238394964594808, "percentage": 1.45, "elapsed_time": "0:17:32", "remaining_time": "19:50:48"}
24
+ {"current_steps": 24, "total_steps": 1585, "loss": 0.8111, "lr": 6.037735849056604e-06, "epoch": 0.07553107789142408, "percentage": 1.51, "elapsed_time": "0:18:16", "remaining_time": "19:49:00"}
25
+ {"current_steps": 25, "total_steps": 1585, "loss": 0.8, "lr": 6.289308176100629e-06, "epoch": 0.07867820613690008, "percentage": 1.58, "elapsed_time": "0:19:01", "remaining_time": "19:46:53"}
26
+ {"current_steps": 26, "total_steps": 1585, "loss": 0.7959, "lr": 6.540880503144654e-06, "epoch": 0.08182533438237609, "percentage": 1.64, "elapsed_time": "0:19:46", "remaining_time": "19:45:40"}
27
+ {"current_steps": 27, "total_steps": 1585, "loss": 0.794, "lr": 6.792452830188679e-06, "epoch": 0.08497246262785209, "percentage": 1.7, "elapsed_time": "0:20:30", "remaining_time": "19:43:20"}
28
+ {"current_steps": 28, "total_steps": 1585, "loss": 0.7739, "lr": 7.044025157232705e-06, "epoch": 0.08811959087332809, "percentage": 1.77, "elapsed_time": "0:21:18", "remaining_time": "19:44:44"}
29
+ {"current_steps": 29, "total_steps": 1585, "loss": 0.7886, "lr": 7.29559748427673e-06, "epoch": 0.09126671911880409, "percentage": 1.83, "elapsed_time": "0:22:03", "remaining_time": "19:43:37"}
30
+ {"current_steps": 30, "total_steps": 1585, "loss": 0.7766, "lr": 7.5471698113207555e-06, "epoch": 0.0944138473642801, "percentage": 1.89, "elapsed_time": "0:22:50", "remaining_time": "19:43:43"}
31
+ {"current_steps": 31, "total_steps": 1585, "loss": 0.7634, "lr": 7.79874213836478e-06, "epoch": 0.0975609756097561, "percentage": 1.96, "elapsed_time": "0:23:37", "remaining_time": "19:44:03"}
32
+ {"current_steps": 32, "total_steps": 1585, "loss": 0.7609, "lr": 8.050314465408805e-06, "epoch": 0.1007081038552321, "percentage": 2.02, "elapsed_time": "0:24:20", "remaining_time": "19:41:25"}
33
+ {"current_steps": 33, "total_steps": 1585, "loss": 0.7633, "lr": 8.301886792452832e-06, "epoch": 0.1038552321007081, "percentage": 2.08, "elapsed_time": "0:25:01", "remaining_time": "19:36:46"}
34
+ {"current_steps": 34, "total_steps": 1585, "loss": 0.7442, "lr": 8.553459119496857e-06, "epoch": 0.10700236034618411, "percentage": 2.15, "elapsed_time": "0:25:46", "remaining_time": "19:35:42"}
35
+ {"current_steps": 35, "total_steps": 1585, "loss": 0.7424, "lr": 8.805031446540882e-06, "epoch": 0.11014948859166011, "percentage": 2.21, "elapsed_time": "0:26:30", "remaining_time": "19:33:41"}
36
+ {"current_steps": 36, "total_steps": 1585, "loss": 0.7356, "lr": 9.056603773584907e-06, "epoch": 0.11329661683713611, "percentage": 2.27, "elapsed_time": "0:27:18", "remaining_time": "19:34:46"}
37
+ {"current_steps": 37, "total_steps": 1585, "loss": 0.7439, "lr": 9.308176100628931e-06, "epoch": 0.11644374508261211, "percentage": 2.33, "elapsed_time": "0:28:02", "remaining_time": "19:33:27"}
38
+ {"current_steps": 38, "total_steps": 1585, "loss": 0.7392, "lr": 9.559748427672956e-06, "epoch": 0.11959087332808813, "percentage": 2.4, "elapsed_time": "0:28:48", "remaining_time": "19:32:52"}
39
+ {"current_steps": 39, "total_steps": 1585, "loss": 0.7362, "lr": 9.811320754716981e-06, "epoch": 0.12273800157356413, "percentage": 2.46, "elapsed_time": "0:29:35", "remaining_time": "19:33:12"}
40
+ {"current_steps": 40, "total_steps": 1585, "loss": 0.7282, "lr": 1.0062893081761008e-05, "epoch": 0.12588512981904013, "percentage": 2.52, "elapsed_time": "0:30:20", "remaining_time": "19:31:56"}
41
+ {"current_steps": 41, "total_steps": 1585, "loss": 0.7209, "lr": 1.0314465408805033e-05, "epoch": 0.12903225806451613, "percentage": 2.59, "elapsed_time": "0:31:08", "remaining_time": "19:32:59"}
42
+ {"current_steps": 42, "total_steps": 1585, "loss": 0.7276, "lr": 1.0566037735849058e-05, "epoch": 0.13217938630999213, "percentage": 2.65, "elapsed_time": "0:31:55", "remaining_time": "19:32:39"}
43
+ {"current_steps": 43, "total_steps": 1585, "loss": 0.7381, "lr": 1.0817610062893083e-05, "epoch": 0.13532651455546812, "percentage": 2.71, "elapsed_time": "0:32:36", "remaining_time": "19:29:22"}
44
+ {"current_steps": 44, "total_steps": 1585, "loss": 0.7269, "lr": 1.1069182389937107e-05, "epoch": 0.13847364280094415, "percentage": 2.78, "elapsed_time": "0:33:21", "remaining_time": "19:28:32"}
45
+ {"current_steps": 45, "total_steps": 1585, "loss": 0.7096, "lr": 1.1320754716981132e-05, "epoch": 0.14162077104642015, "percentage": 2.84, "elapsed_time": "0:34:08", "remaining_time": "19:28:38"}
46
+ {"current_steps": 46, "total_steps": 1585, "loss": 0.7218, "lr": 1.1572327044025157e-05, "epoch": 0.14476789929189615, "percentage": 2.9, "elapsed_time": "0:34:56", "remaining_time": "19:29:01"}
47
+ {"current_steps": 47, "total_steps": 1585, "loss": 0.7251, "lr": 1.1823899371069182e-05, "epoch": 0.14791502753737215, "percentage": 2.97, "elapsed_time": "0:35:40", "remaining_time": "19:27:19"}
48
+ {"current_steps": 48, "total_steps": 1585, "loss": 0.7137, "lr": 1.2075471698113209e-05, "epoch": 0.15106215578284815, "percentage": 3.03, "elapsed_time": "0:36:25", "remaining_time": "19:26:13"}
49
+ {"current_steps": 49, "total_steps": 1585, "loss": 0.7081, "lr": 1.2327044025157234e-05, "epoch": 0.15420928402832415, "percentage": 3.09, "elapsed_time": "0:37:13", "remaining_time": "19:26:57"}
50
+ {"current_steps": 50, "total_steps": 1585, "loss": 0.6987, "lr": 1.2578616352201259e-05, "epoch": 0.15735641227380015, "percentage": 3.15, "elapsed_time": "0:37:58", "remaining_time": "19:26:02"}
51
+ {"current_steps": 51, "total_steps": 1585, "loss": 0.7063, "lr": 1.2830188679245283e-05, "epoch": 0.16050354051927615, "percentage": 3.22, "elapsed_time": "0:38:46", "remaining_time": "19:26:23"}
52
+ {"current_steps": 52, "total_steps": 1585, "loss": 0.6963, "lr": 1.3081761006289308e-05, "epoch": 0.16365066876475218, "percentage": 3.28, "elapsed_time": "0:39:32", "remaining_time": "19:25:43"}
53
+ {"current_steps": 53, "total_steps": 1585, "loss": 0.7056, "lr": 1.3333333333333333e-05, "epoch": 0.16679779701022818, "percentage": 3.34, "elapsed_time": "0:40:17", "remaining_time": "19:24:36"}
54
+ {"current_steps": 54, "total_steps": 1585, "loss": 0.7008, "lr": 1.3584905660377358e-05, "epoch": 0.16994492525570418, "percentage": 3.41, "elapsed_time": "0:41:01", "remaining_time": "19:22:57"}
55
+ {"current_steps": 55, "total_steps": 1585, "loss": 0.6996, "lr": 1.3836477987421383e-05, "epoch": 0.17309205350118018, "percentage": 3.47, "elapsed_time": "0:41:46", "remaining_time": "19:22:18"}
56
+ {"current_steps": 56, "total_steps": 1585, "loss": 0.7102, "lr": 1.408805031446541e-05, "epoch": 0.17623918174665618, "percentage": 3.53, "elapsed_time": "0:42:30", "remaining_time": "19:20:32"}
57
+ {"current_steps": 57, "total_steps": 1585, "loss": 0.6938, "lr": 1.4339622641509435e-05, "epoch": 0.17938630999213218, "percentage": 3.6, "elapsed_time": "0:43:17", "remaining_time": "19:20:37"}
58
+ {"current_steps": 58, "total_steps": 1585, "loss": 0.6961, "lr": 1.459119496855346e-05, "epoch": 0.18253343823760818, "percentage": 3.66, "elapsed_time": "0:44:03", "remaining_time": "19:20:00"}
59
+ {"current_steps": 59, "total_steps": 1585, "loss": 0.6902, "lr": 1.4842767295597484e-05, "epoch": 0.18568056648308418, "percentage": 3.72, "elapsed_time": "0:44:48", "remaining_time": "19:19:08"}
60
+ {"current_steps": 60, "total_steps": 1585, "loss": 0.6829, "lr": 1.5094339622641511e-05, "epoch": 0.1888276947285602, "percentage": 3.79, "elapsed_time": "0:45:36", "remaining_time": "19:19:10"}
61
+ {"current_steps": 61, "total_steps": 1585, "loss": 0.6968, "lr": 1.5345911949685536e-05, "epoch": 0.1919748229740362, "percentage": 3.85, "elapsed_time": "0:46:22", "remaining_time": "19:18:27"}
62
+ {"current_steps": 62, "total_steps": 1585, "loss": 0.6858, "lr": 1.559748427672956e-05, "epoch": 0.1951219512195122, "percentage": 3.91, "elapsed_time": "0:47:09", "remaining_time": "19:18:17"}
63
+ {"current_steps": 63, "total_steps": 1585, "loss": 0.6932, "lr": 1.5849056603773586e-05, "epoch": 0.1982690794649882, "percentage": 3.97, "elapsed_time": "0:47:56", "remaining_time": "19:18:08"}
64
+ {"current_steps": 64, "total_steps": 1585, "loss": 0.6897, "lr": 1.610062893081761e-05, "epoch": 0.2014162077104642, "percentage": 4.04, "elapsed_time": "0:48:40", "remaining_time": "19:16:53"}
65
+ {"current_steps": 65, "total_steps": 1585, "loss": 0.6874, "lr": 1.635220125786164e-05, "epoch": 0.2045633359559402, "percentage": 4.1, "elapsed_time": "0:49:29", "remaining_time": "19:17:17"}
66
+ {"current_steps": 66, "total_steps": 1585, "loss": 0.6831, "lr": 1.6603773584905664e-05, "epoch": 0.2077104642014162, "percentage": 4.16, "elapsed_time": "0:50:16", "remaining_time": "19:16:57"}
67
+ {"current_steps": 67, "total_steps": 1585, "loss": 0.6687, "lr": 1.685534591194969e-05, "epoch": 0.2108575924468922, "percentage": 4.23, "elapsed_time": "0:50:58", "remaining_time": "19:14:45"}
68
+ {"current_steps": 68, "total_steps": 1585, "loss": 0.6834, "lr": 1.7106918238993714e-05, "epoch": 0.21400472069236823, "percentage": 4.29, "elapsed_time": "0:51:44", "remaining_time": "19:14:15"}
69
+ {"current_steps": 69, "total_steps": 1585, "loss": 0.6951, "lr": 1.735849056603774e-05, "epoch": 0.21715184893784423, "percentage": 4.35, "elapsed_time": "0:52:28", "remaining_time": "19:12:55"}
70
+ {"current_steps": 70, "total_steps": 1585, "loss": 0.6847, "lr": 1.7610062893081763e-05, "epoch": 0.22029897718332023, "percentage": 4.42, "elapsed_time": "0:53:14", "remaining_time": "19:12:11"}
71
+ {"current_steps": 71, "total_steps": 1585, "loss": 0.6774, "lr": 1.7861635220125788e-05, "epoch": 0.22344610542879623, "percentage": 4.48, "elapsed_time": "0:54:01", "remaining_time": "19:12:08"}
72
+ {"current_steps": 72, "total_steps": 1585, "loss": 0.6708, "lr": 1.8113207547169813e-05, "epoch": 0.22659323367427223, "percentage": 4.54, "elapsed_time": "0:54:47", "remaining_time": "19:11:15"}
73
+ {"current_steps": 73, "total_steps": 1585, "loss": 0.6706, "lr": 1.8364779874213838e-05, "epoch": 0.22974036191974823, "percentage": 4.61, "elapsed_time": "0:55:25", "remaining_time": "19:08:06"}
74
+ {"current_steps": 74, "total_steps": 1585, "loss": 0.688, "lr": 1.8616352201257863e-05, "epoch": 0.23288749016522423, "percentage": 4.67, "elapsed_time": "0:56:11", "remaining_time": "19:07:32"}
75
+ {"current_steps": 75, "total_steps": 1585, "loss": 0.6856, "lr": 1.8867924528301888e-05, "epoch": 0.23603461841070023, "percentage": 4.73, "elapsed_time": "0:56:59", "remaining_time": "19:07:23"}
76
+ {"current_steps": 76, "total_steps": 1585, "loss": 0.6743, "lr": 1.9119496855345913e-05, "epoch": 0.23918174665617625, "percentage": 4.79, "elapsed_time": "0:57:45", "remaining_time": "19:06:57"}
77
+ {"current_steps": 77, "total_steps": 1585, "loss": 0.6769, "lr": 1.9371069182389938e-05, "epoch": 0.24232887490165225, "percentage": 4.86, "elapsed_time": "0:58:31", "remaining_time": "19:06:17"}
78
+ {"current_steps": 78, "total_steps": 1585, "loss": 0.6673, "lr": 1.9622641509433963e-05, "epoch": 0.24547600314712825, "percentage": 4.92, "elapsed_time": "0:59:18", "remaining_time": "19:06:00"}
79
+ {"current_steps": 79, "total_steps": 1585, "loss": 0.6669, "lr": 1.9874213836477987e-05, "epoch": 0.24862313139260425, "percentage": 4.98, "elapsed_time": "1:00:03", "remaining_time": "19:04:50"}
80
+ {"current_steps": 80, "total_steps": 1585, "loss": 0.6585, "lr": 2.0125786163522016e-05, "epoch": 0.25177025963808025, "percentage": 5.05, "elapsed_time": "1:00:45", "remaining_time": "19:03:04"}
81
+ {"current_steps": 81, "total_steps": 1585, "loss": 0.6689, "lr": 2.037735849056604e-05, "epoch": 0.2549173878835563, "percentage": 5.11, "elapsed_time": "1:01:29", "remaining_time": "19:01:50"}
82
+ {"current_steps": 82, "total_steps": 1585, "loss": 0.6666, "lr": 2.0628930817610066e-05, "epoch": 0.25806451612903225, "percentage": 5.17, "elapsed_time": "1:02:15", "remaining_time": "19:01:07"}
83
+ {"current_steps": 83, "total_steps": 1585, "loss": 0.6605, "lr": 2.088050314465409e-05, "epoch": 0.2612116443745083, "percentage": 5.24, "elapsed_time": "1:03:03", "remaining_time": "19:01:07"}
84
+ {"current_steps": 84, "total_steps": 1585, "loss": 0.6777, "lr": 2.1132075471698115e-05, "epoch": 0.26435877261998425, "percentage": 5.3, "elapsed_time": "1:03:48", "remaining_time": "19:00:14"}
85
+ {"current_steps": 85, "total_steps": 1585, "loss": 0.6628, "lr": 2.138364779874214e-05, "epoch": 0.2675059008654603, "percentage": 5.36, "elapsed_time": "1:04:34", "remaining_time": "18:59:24"}
86
+ {"current_steps": 86, "total_steps": 1585, "loss": 0.6684, "lr": 2.1635220125786165e-05, "epoch": 0.27065302911093625, "percentage": 5.43, "elapsed_time": "1:05:20", "remaining_time": "18:58:52"}
87
+ {"current_steps": 87, "total_steps": 1585, "loss": 0.6644, "lr": 2.188679245283019e-05, "epoch": 0.2738001573564123, "percentage": 5.49, "elapsed_time": "1:06:08", "remaining_time": "18:58:43"}
88
+ {"current_steps": 88, "total_steps": 1585, "loss": 0.6621, "lr": 2.2138364779874215e-05, "epoch": 0.2769472856018883, "percentage": 5.55, "elapsed_time": "1:06:55", "remaining_time": "18:58:20"}
89
+ {"current_steps": 89, "total_steps": 1585, "loss": 0.6546, "lr": 2.238993710691824e-05, "epoch": 0.2800944138473643, "percentage": 5.62, "elapsed_time": "1:07:41", "remaining_time": "18:57:57"}
90
+ {"current_steps": 90, "total_steps": 1585, "loss": 0.66, "lr": 2.2641509433962265e-05, "epoch": 0.2832415420928403, "percentage": 5.68, "elapsed_time": "1:08:27", "remaining_time": "18:57:05"}
91
+ {"current_steps": 91, "total_steps": 1585, "loss": 0.6592, "lr": 2.289308176100629e-05, "epoch": 0.2863886703383163, "percentage": 5.74, "elapsed_time": "1:09:09", "remaining_time": "18:55:18"}
92
+ {"current_steps": 92, "total_steps": 1585, "loss": 0.6721, "lr": 2.3144654088050315e-05, "epoch": 0.2895357985837923, "percentage": 5.8, "elapsed_time": "1:09:51", "remaining_time": "18:53:38"}
93
+ {"current_steps": 93, "total_steps": 1585, "loss": 0.6659, "lr": 2.339622641509434e-05, "epoch": 0.2926829268292683, "percentage": 5.87, "elapsed_time": "1:10:35", "remaining_time": "18:52:28"}
94
+ {"current_steps": 94, "total_steps": 1585, "loss": 0.6707, "lr": 2.3647798742138364e-05, "epoch": 0.2958300550747443, "percentage": 5.93, "elapsed_time": "1:11:21", "remaining_time": "18:51:51"}
95
+ {"current_steps": 95, "total_steps": 1585, "loss": 0.6717, "lr": 2.3899371069182393e-05, "epoch": 0.2989771833202203, "percentage": 5.99, "elapsed_time": "1:12:08", "remaining_time": "18:51:27"}
96
+ {"current_steps": 96, "total_steps": 1585, "loss": 0.6665, "lr": 2.4150943396226418e-05, "epoch": 0.3021243115656963, "percentage": 6.06, "elapsed_time": "1:12:54", "remaining_time": "18:50:53"}
97
+ {"current_steps": 97, "total_steps": 1585, "loss": 0.6639, "lr": 2.4402515723270442e-05, "epoch": 0.30527143981117233, "percentage": 6.12, "elapsed_time": "1:13:39", "remaining_time": "18:49:59"}
98
+ {"current_steps": 98, "total_steps": 1585, "loss": 0.6601, "lr": 2.4654088050314467e-05, "epoch": 0.3084185680566483, "percentage": 6.18, "elapsed_time": "1:14:24", "remaining_time": "18:49:00"}
99
+ {"current_steps": 99, "total_steps": 1585, "loss": 0.6545, "lr": 2.4905660377358492e-05, "epoch": 0.31156569630212433, "percentage": 6.25, "elapsed_time": "1:15:10", "remaining_time": "18:48:22"}
100
+ {"current_steps": 100, "total_steps": 1585, "loss": 0.6476, "lr": 2.5157232704402517e-05, "epoch": 0.3147128245476003, "percentage": 6.31, "elapsed_time": "1:15:54", "remaining_time": "18:47:17"}
101
+ {"current_steps": 101, "total_steps": 1585, "loss": 0.6689, "lr": 2.5408805031446542e-05, "epoch": 0.31785995279307633, "percentage": 6.37, "elapsed_time": "1:16:36", "remaining_time": "18:45:38"}
102
+ {"current_steps": 102, "total_steps": 1585, "loss": 0.6667, "lr": 2.5660377358490567e-05, "epoch": 0.3210070810385523, "percentage": 6.44, "elapsed_time": "1:17:20", "remaining_time": "18:44:33"}
103
+ {"current_steps": 103, "total_steps": 1585, "loss": 0.6485, "lr": 2.5911949685534592e-05, "epoch": 0.3241542092840283, "percentage": 6.5, "elapsed_time": "1:18:04", "remaining_time": "18:43:28"}
104
+ {"current_steps": 104, "total_steps": 1585, "loss": 0.6492, "lr": 2.6163522012578617e-05, "epoch": 0.32730133752950435, "percentage": 6.56, "elapsed_time": "1:18:50", "remaining_time": "18:42:40"}
105
+ {"current_steps": 105, "total_steps": 1585, "loss": 0.6565, "lr": 2.641509433962264e-05, "epoch": 0.3304484657749803, "percentage": 6.62, "elapsed_time": "1:19:35", "remaining_time": "18:41:54"}
106
+ {"current_steps": 106, "total_steps": 1585, "loss": 0.6523, "lr": 2.6666666666666667e-05, "epoch": 0.33359559402045635, "percentage": 6.69, "elapsed_time": "1:20:17", "remaining_time": "18:40:11"}
107
+ {"current_steps": 107, "total_steps": 1585, "loss": 0.6556, "lr": 2.691823899371069e-05, "epoch": 0.3367427222659323, "percentage": 6.75, "elapsed_time": "1:21:02", "remaining_time": "18:39:21"}
108
+ {"current_steps": 108, "total_steps": 1585, "loss": 0.6572, "lr": 2.7169811320754716e-05, "epoch": 0.33988985051140835, "percentage": 6.81, "elapsed_time": "1:21:47", "remaining_time": "18:38:29"}
109
+ {"current_steps": 109, "total_steps": 1585, "loss": 0.6451, "lr": 2.742138364779874e-05, "epoch": 0.3430369787568843, "percentage": 6.88, "elapsed_time": "1:22:33", "remaining_time": "18:37:55"}
110
+ {"current_steps": 110, "total_steps": 1585, "loss": 0.6479, "lr": 2.7672955974842766e-05, "epoch": 0.34618410700236035, "percentage": 6.94, "elapsed_time": "1:23:21", "remaining_time": "18:37:47"}
111
+ {"current_steps": 111, "total_steps": 1585, "loss": 0.6537, "lr": 2.7924528301886794e-05, "epoch": 0.3493312352478363, "percentage": 7.0, "elapsed_time": "1:24:05", "remaining_time": "18:36:47"}
112
+ {"current_steps": 112, "total_steps": 1585, "loss": 0.6632, "lr": 2.817610062893082e-05, "epoch": 0.35247836349331235, "percentage": 7.07, "elapsed_time": "1:24:49", "remaining_time": "18:35:37"}
113
+ {"current_steps": 113, "total_steps": 1585, "loss": 0.6588, "lr": 2.8427672955974844e-05, "epoch": 0.3556254917387884, "percentage": 7.13, "elapsed_time": "1:25:36", "remaining_time": "18:35:05"}
114
+ {"current_steps": 114, "total_steps": 1585, "loss": 0.6566, "lr": 2.867924528301887e-05, "epoch": 0.35877261998426435, "percentage": 7.19, "elapsed_time": "1:26:23", "remaining_time": "18:34:44"}
115
+ {"current_steps": 115, "total_steps": 1585, "loss": 0.6536, "lr": 2.8930817610062894e-05, "epoch": 0.3619197482297404, "percentage": 7.26, "elapsed_time": "1:27:08", "remaining_time": "18:33:48"}
116
+ {"current_steps": 116, "total_steps": 1585, "loss": 0.6516, "lr": 2.918238993710692e-05, "epoch": 0.36506687647521635, "percentage": 7.32, "elapsed_time": "1:27:55", "remaining_time": "18:33:26"}
117
+ {"current_steps": 117, "total_steps": 1585, "loss": 0.643, "lr": 2.9433962264150944e-05, "epoch": 0.3682140047206924, "percentage": 7.38, "elapsed_time": "1:28:39", "remaining_time": "18:32:25"}
118
+ {"current_steps": 118, "total_steps": 1585, "loss": 0.6621, "lr": 2.968553459119497e-05, "epoch": 0.37136113296616835, "percentage": 7.44, "elapsed_time": "1:29:27", "remaining_time": "18:32:06"}
119
+ {"current_steps": 119, "total_steps": 1585, "loss": 0.6594, "lr": 2.9937106918238994e-05, "epoch": 0.3745082612116444, "percentage": 7.51, "elapsed_time": "1:30:13", "remaining_time": "18:31:35"}
120
+ {"current_steps": 120, "total_steps": 1585, "loss": 0.6614, "lr": 3.0188679245283022e-05, "epoch": 0.3776553894571204, "percentage": 7.57, "elapsed_time": "1:31:00", "remaining_time": "18:31:09"}
121
+ {"current_steps": 121, "total_steps": 1585, "loss": 0.6586, "lr": 3.044025157232705e-05, "epoch": 0.3808025177025964, "percentage": 7.63, "elapsed_time": "1:31:48", "remaining_time": "18:30:47"}
122
+ {"current_steps": 122, "total_steps": 1585, "loss": 0.6587, "lr": 3.069182389937107e-05, "epoch": 0.3839496459480724, "percentage": 7.7, "elapsed_time": "1:32:33", "remaining_time": "18:29:59"}
123
+ {"current_steps": 123, "total_steps": 1585, "loss": 0.6541, "lr": 3.09433962264151e-05, "epoch": 0.3870967741935484, "percentage": 7.76, "elapsed_time": "1:33:20", "remaining_time": "18:29:29"}
124
+ {"current_steps": 124, "total_steps": 1585, "loss": 0.649, "lr": 3.119496855345912e-05, "epoch": 0.3902439024390244, "percentage": 7.82, "elapsed_time": "1:34:07", "remaining_time": "18:28:56"}
125
+ {"current_steps": 125, "total_steps": 1585, "loss": 0.6472, "lr": 3.144654088050315e-05, "epoch": 0.3933910306845004, "percentage": 7.89, "elapsed_time": "1:34:52", "remaining_time": "18:28:13"}
126
+ {"current_steps": 126, "total_steps": 1585, "loss": 0.6649, "lr": 3.169811320754717e-05, "epoch": 0.3965381589299764, "percentage": 7.95, "elapsed_time": "1:35:39", "remaining_time": "18:27:43"}
127
+ {"current_steps": 127, "total_steps": 1585, "loss": 0.6544, "lr": 3.19496855345912e-05, "epoch": 0.3996852871754524, "percentage": 8.01, "elapsed_time": "1:36:26", "remaining_time": "18:27:06"}
128
+ {"current_steps": 128, "total_steps": 1585, "loss": 0.6511, "lr": 3.220125786163522e-05, "epoch": 0.4028324154209284, "percentage": 8.08, "elapsed_time": "1:37:13", "remaining_time": "18:26:47"}
129
+ {"current_steps": 129, "total_steps": 1585, "loss": 0.6435, "lr": 3.245283018867925e-05, "epoch": 0.40597954366640443, "percentage": 8.14, "elapsed_time": "1:38:00", "remaining_time": "18:26:16"}
130
+ {"current_steps": 130, "total_steps": 1585, "loss": 0.6493, "lr": 3.270440251572328e-05, "epoch": 0.4091266719118804, "percentage": 8.2, "elapsed_time": "1:38:45", "remaining_time": "18:25:22"}
131
+ {"current_steps": 131, "total_steps": 1585, "loss": 0.6535, "lr": 3.29559748427673e-05, "epoch": 0.41227380015735643, "percentage": 8.26, "elapsed_time": "1:39:31", "remaining_time": "18:24:40"}
132
+ {"current_steps": 132, "total_steps": 1585, "loss": 0.6434, "lr": 3.320754716981133e-05, "epoch": 0.4154209284028324, "percentage": 8.33, "elapsed_time": "1:40:18", "remaining_time": "18:24:04"}
133
+ {"current_steps": 133, "total_steps": 1585, "loss": 0.6584, "lr": 3.345911949685535e-05, "epoch": 0.41856805664830843, "percentage": 8.39, "elapsed_time": "1:41:05", "remaining_time": "18:23:40"}
134
+ {"current_steps": 134, "total_steps": 1585, "loss": 0.6458, "lr": 3.371069182389938e-05, "epoch": 0.4217151848937844, "percentage": 8.45, "elapsed_time": "1:41:52", "remaining_time": "18:23:03"}
135
+ {"current_steps": 135, "total_steps": 1585, "loss": 0.6447, "lr": 3.39622641509434e-05, "epoch": 0.42486231313926043, "percentage": 8.52, "elapsed_time": "1:42:36", "remaining_time": "18:22:10"}
136
+ {"current_steps": 136, "total_steps": 1585, "loss": 0.651, "lr": 3.421383647798743e-05, "epoch": 0.42800944138473646, "percentage": 8.58, "elapsed_time": "1:43:20", "remaining_time": "18:21:04"}
137
+ {"current_steps": 137, "total_steps": 1585, "loss": 0.6406, "lr": 3.446540880503145e-05, "epoch": 0.4311565696302124, "percentage": 8.64, "elapsed_time": "1:44:07", "remaining_time": "18:20:29"}
138
+ {"current_steps": 138, "total_steps": 1585, "loss": 0.649, "lr": 3.471698113207548e-05, "epoch": 0.43430369787568845, "percentage": 8.71, "elapsed_time": "1:44:52", "remaining_time": "18:19:36"}
139
+ {"current_steps": 139, "total_steps": 1585, "loss": 0.6433, "lr": 3.49685534591195e-05, "epoch": 0.4374508261211644, "percentage": 8.77, "elapsed_time": "1:45:39", "remaining_time": "18:19:13"}
140
+ {"current_steps": 140, "total_steps": 1585, "loss": 0.6442, "lr": 3.522012578616353e-05, "epoch": 0.44059795436664045, "percentage": 8.83, "elapsed_time": "1:46:28", "remaining_time": "18:18:53"}
141
+ {"current_steps": 141, "total_steps": 1585, "loss": 0.6329, "lr": 3.547169811320755e-05, "epoch": 0.4437450826121164, "percentage": 8.9, "elapsed_time": "1:47:14", "remaining_time": "18:18:17"}
142
+ {"current_steps": 142, "total_steps": 1585, "loss": 0.6503, "lr": 3.5723270440251577e-05, "epoch": 0.44689221085759245, "percentage": 8.96, "elapsed_time": "1:48:02", "remaining_time": "18:17:52"}
143
+ {"current_steps": 143, "total_steps": 1585, "loss": 0.6319, "lr": 3.59748427672956e-05, "epoch": 0.4500393391030684, "percentage": 9.02, "elapsed_time": "1:48:50", "remaining_time": "18:17:34"}
144
+ {"current_steps": 144, "total_steps": 1585, "loss": 0.6447, "lr": 3.6226415094339626e-05, "epoch": 0.45318646734854445, "percentage": 9.09, "elapsed_time": "1:49:37", "remaining_time": "18:16:58"}
145
+ {"current_steps": 145, "total_steps": 1585, "loss": 0.6449, "lr": 3.6477987421383655e-05, "epoch": 0.4563335955940205, "percentage": 9.15, "elapsed_time": "1:50:24", "remaining_time": "18:16:30"}
146
+ {"current_steps": 146, "total_steps": 1585, "loss": 0.6447, "lr": 3.6729559748427676e-05, "epoch": 0.45948072383949645, "percentage": 9.21, "elapsed_time": "1:51:08", "remaining_time": "18:15:27"}
147
+ {"current_steps": 147, "total_steps": 1585, "loss": 0.6473, "lr": 3.6981132075471704e-05, "epoch": 0.4626278520849725, "percentage": 9.27, "elapsed_time": "1:51:54", "remaining_time": "18:14:46"}
148
+ {"current_steps": 148, "total_steps": 1585, "loss": 0.6521, "lr": 3.7232704402515726e-05, "epoch": 0.46577498033044845, "percentage": 9.34, "elapsed_time": "1:52:40", "remaining_time": "18:14:00"}
149
+ {"current_steps": 149, "total_steps": 1585, "loss": 0.6424, "lr": 3.7484276729559754e-05, "epoch": 0.4689221085759245, "percentage": 9.4, "elapsed_time": "1:53:24", "remaining_time": "18:13:03"}
150
+ {"current_steps": 150, "total_steps": 1585, "loss": 0.6599, "lr": 3.7735849056603776e-05, "epoch": 0.47206923682140045, "percentage": 9.46, "elapsed_time": "1:54:08", "remaining_time": "18:11:53"}
151
+ {"current_steps": 151, "total_steps": 1585, "loss": 0.6365, "lr": 3.7987421383647804e-05, "epoch": 0.4752163650668765, "percentage": 9.53, "elapsed_time": "1:54:53", "remaining_time": "18:11:01"}
152
+ {"current_steps": 152, "total_steps": 1585, "loss": 0.6484, "lr": 3.8238993710691826e-05, "epoch": 0.4783634933123525, "percentage": 9.59, "elapsed_time": "1:55:38", "remaining_time": "18:10:17"}
153
+ {"current_steps": 153, "total_steps": 1585, "loss": 0.6529, "lr": 3.8490566037735854e-05, "epoch": 0.4815106215578285, "percentage": 9.65, "elapsed_time": "1:56:24", "remaining_time": "18:09:27"}
154
+ {"current_steps": 154, "total_steps": 1585, "loss": 0.6405, "lr": 3.8742138364779875e-05, "epoch": 0.4846577498033045, "percentage": 9.72, "elapsed_time": "1:57:06", "remaining_time": "18:08:07"}
155
+ {"current_steps": 155, "total_steps": 1585, "loss": 0.6465, "lr": 3.8993710691823904e-05, "epoch": 0.4878048780487805, "percentage": 9.78, "elapsed_time": "1:57:54", "remaining_time": "18:07:51"}
156
+ {"current_steps": 156, "total_steps": 1585, "loss": 0.6296, "lr": 3.9245283018867925e-05, "epoch": 0.4909520062942565, "percentage": 9.84, "elapsed_time": "1:58:37", "remaining_time": "18:06:38"}
157
+ {"current_steps": 157, "total_steps": 1585, "loss": 0.6464, "lr": 3.9496855345911953e-05, "epoch": 0.4940991345397325, "percentage": 9.91, "elapsed_time": "1:59:24", "remaining_time": "18:06:03"}
158
+ {"current_steps": 158, "total_steps": 1585, "loss": 0.6476, "lr": 3.9748427672955975e-05, "epoch": 0.4972462627852085, "percentage": 9.97, "elapsed_time": "2:00:09", "remaining_time": "18:05:11"}
159
+ {"current_steps": 159, "total_steps": 1585, "loss": 0.6451, "lr": 4e-05, "epoch": 0.5003933910306845, "percentage": 10.03, "elapsed_time": "2:00:51", "remaining_time": "18:03:53"}
160
+ {"current_steps": 160, "total_steps": 1585, "loss": 0.6457, "lr": 3.999995146438705e-05, "epoch": 0.5035405192761605, "percentage": 10.09, "elapsed_time": "2:01:37", "remaining_time": "18:03:13"}
161
+ {"current_steps": 161, "total_steps": 1585, "loss": 0.6307, "lr": 3.999980585778375e-05, "epoch": 0.5066876475216365, "percentage": 10.16, "elapsed_time": "2:02:19", "remaining_time": "18:02:00"}
162
+ {"current_steps": 162, "total_steps": 1585, "loss": 0.6365, "lr": 3.999956318089682e-05, "epoch": 0.5098347757671126, "percentage": 10.22, "elapsed_time": "2:03:08", "remaining_time": "18:01:42"}
163
+ {"current_steps": 163, "total_steps": 1585, "loss": 0.6618, "lr": 3.9999223434904104e-05, "epoch": 0.5129819040125885, "percentage": 10.28, "elapsed_time": "2:03:55", "remaining_time": "18:01:07"}
164
+ {"current_steps": 164, "total_steps": 1585, "loss": 0.6486, "lr": 3.9998786621454584e-05, "epoch": 0.5161290322580645, "percentage": 10.35, "elapsed_time": "2:04:43", "remaining_time": "18:00:41"}
165
+ {"current_steps": 165, "total_steps": 1585, "loss": 0.6344, "lr": 3.999825274266836e-05, "epoch": 0.5192761605035405, "percentage": 10.41, "elapsed_time": "2:05:25", "remaining_time": "17:59:28"}
166
+ {"current_steps": 166, "total_steps": 1585, "loss": 0.6578, "lr": 3.9997621801136645e-05, "epoch": 0.5224232887490166, "percentage": 10.47, "elapsed_time": "2:06:11", "remaining_time": "17:58:42"}
167
+ {"current_steps": 167, "total_steps": 1585, "loss": 0.6478, "lr": 3.999689379992174e-05, "epoch": 0.5255704169944925, "percentage": 10.54, "elapsed_time": "2:06:57", "remaining_time": "17:57:59"}
168
+ {"current_steps": 168, "total_steps": 1585, "loss": 0.634, "lr": 3.9996068742557065e-05, "epoch": 0.5287175452399685, "percentage": 10.6, "elapsed_time": "2:07:44", "remaining_time": "17:57:26"}
169
+ {"current_steps": 169, "total_steps": 1585, "loss": 0.6416, "lr": 3.999514663304708e-05, "epoch": 0.5318646734854445, "percentage": 10.66, "elapsed_time": "2:08:25", "remaining_time": "17:55:58"}
170
+ {"current_steps": 170, "total_steps": 1585, "loss": 0.6323, "lr": 3.999412747586729e-05, "epoch": 0.5350118017309206, "percentage": 10.73, "elapsed_time": "2:09:08", "remaining_time": "17:54:56"}
171
+ {"current_steps": 171, "total_steps": 1585, "loss": 0.6389, "lr": 3.999301127596425e-05, "epoch": 0.5381589299763966, "percentage": 10.79, "elapsed_time": "2:09:51", "remaining_time": "17:53:46"}
172
+ {"current_steps": 172, "total_steps": 1585, "loss": 0.6419, "lr": 3.9991798038755484e-05, "epoch": 0.5413060582218725, "percentage": 10.85, "elapsed_time": "2:10:37", "remaining_time": "17:53:04"}
173
+ {"current_steps": 173, "total_steps": 1585, "loss": 0.6285, "lr": 3.999048777012953e-05, "epoch": 0.5444531864673485, "percentage": 10.91, "elapsed_time": "2:11:24", "remaining_time": "17:52:31"}
174
+ {"current_steps": 174, "total_steps": 1585, "loss": 0.6471, "lr": 3.998908047644587e-05, "epoch": 0.5476003147128246, "percentage": 10.98, "elapsed_time": "2:12:08", "remaining_time": "17:51:34"}
175
+ {"current_steps": 175, "total_steps": 1585, "loss": 0.6403, "lr": 3.998757616453486e-05, "epoch": 0.5507474429583006, "percentage": 11.04, "elapsed_time": "2:12:54", "remaining_time": "17:50:53"}
176
+ {"current_steps": 176, "total_steps": 1585, "loss": 0.6396, "lr": 3.998597484169779e-05, "epoch": 0.5538945712037766, "percentage": 11.1, "elapsed_time": "2:13:39", "remaining_time": "17:50:04"}
177
+ {"current_steps": 177, "total_steps": 1585, "loss": 0.6381, "lr": 3.9984276515706764e-05, "epoch": 0.5570416994492525, "percentage": 11.17, "elapsed_time": "2:14:24", "remaining_time": "17:49:10"}
178
+ {"current_steps": 178, "total_steps": 1585, "loss": 0.6498, "lr": 3.998248119480473e-05, "epoch": 0.5601888276947286, "percentage": 11.23, "elapsed_time": "2:15:10", "remaining_time": "17:48:30"}
179
+ {"current_steps": 179, "total_steps": 1585, "loss": 0.6474, "lr": 3.998058888770537e-05, "epoch": 0.5633359559402046, "percentage": 11.29, "elapsed_time": "2:15:57", "remaining_time": "17:47:55"}
180
+ {"current_steps": 180, "total_steps": 1585, "loss": 0.6294, "lr": 3.997859960359313e-05, "epoch": 0.5664830841856806, "percentage": 11.36, "elapsed_time": "2:16:44", "remaining_time": "17:47:17"}
181
+ {"current_steps": 181, "total_steps": 1585, "loss": 0.6228, "lr": 3.997651335212311e-05, "epoch": 0.5696302124311565, "percentage": 11.42, "elapsed_time": "2:17:28", "remaining_time": "17:46:19"}
182
+ {"current_steps": 182, "total_steps": 1585, "loss": 0.6153, "lr": 3.997433014342106e-05, "epoch": 0.5727773406766326, "percentage": 11.48, "elapsed_time": "2:18:13", "remaining_time": "17:45:33"}
183
+ {"current_steps": 183, "total_steps": 1585, "loss": 0.65, "lr": 3.9972049988083323e-05, "epoch": 0.5759244689221086, "percentage": 11.55, "elapsed_time": "2:18:58", "remaining_time": "17:44:42"}
184
+ {"current_steps": 184, "total_steps": 1585, "loss": 0.6256, "lr": 3.9969672897176764e-05, "epoch": 0.5790715971675846, "percentage": 11.61, "elapsed_time": "2:19:44", "remaining_time": "17:43:58"}
185
+ {"current_steps": 185, "total_steps": 1585, "loss": 0.6324, "lr": 3.996719888223875e-05, "epoch": 0.5822187254130606, "percentage": 11.67, "elapsed_time": "2:20:26", "remaining_time": "17:42:47"}
186
+ {"current_steps": 186, "total_steps": 1585, "loss": 0.6452, "lr": 3.996462795527706e-05, "epoch": 0.5853658536585366, "percentage": 11.74, "elapsed_time": "2:21:09", "remaining_time": "17:41:43"}
187
+ {"current_steps": 187, "total_steps": 1585, "loss": 0.6371, "lr": 3.996196012876984e-05, "epoch": 0.5885129819040126, "percentage": 11.8, "elapsed_time": "2:21:57", "remaining_time": "17:41:12"}
188
+ {"current_steps": 188, "total_steps": 1585, "loss": 0.6432, "lr": 3.995919541566555e-05, "epoch": 0.5916601101494886, "percentage": 11.86, "elapsed_time": "2:22:42", "remaining_time": "17:40:26"}
189
+ {"current_steps": 189, "total_steps": 1585, "loss": 0.6261, "lr": 3.995633382938291e-05, "epoch": 0.5948072383949646, "percentage": 11.92, "elapsed_time": "2:23:25", "remaining_time": "17:39:19"}
190
+ {"current_steps": 190, "total_steps": 1585, "loss": 0.6347, "lr": 3.995337538381079e-05, "epoch": 0.5979543666404405, "percentage": 11.99, "elapsed_time": "2:24:11", "remaining_time": "17:38:36"}
191
+ {"current_steps": 191, "total_steps": 1585, "loss": 0.6358, "lr": 3.9950320093308185e-05, "epoch": 0.6011014948859166, "percentage": 12.05, "elapsed_time": "2:24:56", "remaining_time": "17:37:47"}
192
+ {"current_steps": 192, "total_steps": 1585, "loss": 0.6316, "lr": 3.994716797270414e-05, "epoch": 0.6042486231313926, "percentage": 12.11, "elapsed_time": "2:25:42", "remaining_time": "17:37:08"}
193
+ {"current_steps": 193, "total_steps": 1585, "loss": 0.6333, "lr": 3.9943919037297674e-05, "epoch": 0.6073957513768686, "percentage": 12.18, "elapsed_time": "2:26:28", "remaining_time": "17:36:29"}
194
+ {"current_steps": 194, "total_steps": 1585, "loss": 0.6382, "lr": 3.9940573302857675e-05, "epoch": 0.6105428796223447, "percentage": 12.24, "elapsed_time": "2:27:16", "remaining_time": "17:36:01"}
195
+ {"current_steps": 195, "total_steps": 1585, "loss": 0.6326, "lr": 3.993713078562288e-05, "epoch": 0.6136900078678206, "percentage": 12.3, "elapsed_time": "2:28:02", "remaining_time": "17:35:14"}
196
+ {"current_steps": 196, "total_steps": 1585, "loss": 0.6378, "lr": 3.993359150230177e-05, "epoch": 0.6168371361132966, "percentage": 12.37, "elapsed_time": "2:28:48", "remaining_time": "17:34:31"}
197
+ {"current_steps": 197, "total_steps": 1585, "loss": 0.6264, "lr": 3.992995547007245e-05, "epoch": 0.6199842643587726, "percentage": 12.43, "elapsed_time": "2:29:34", "remaining_time": "17:33:52"}
198
+ {"current_steps": 198, "total_steps": 1585, "loss": 0.6284, "lr": 3.992622270658264e-05, "epoch": 0.6231313926042487, "percentage": 12.49, "elapsed_time": "2:30:19", "remaining_time": "17:32:59"}
199
+ {"current_steps": 199, "total_steps": 1585, "loss": 0.6328, "lr": 3.992239322994953e-05, "epoch": 0.6262785208497246, "percentage": 12.56, "elapsed_time": "2:31:05", "remaining_time": "17:32:20"}
200
+ {"current_steps": 200, "total_steps": 1585, "loss": 0.6387, "lr": 3.991846705875973e-05, "epoch": 0.6294256490952006, "percentage": 12.62, "elapsed_time": "2:31:49", "remaining_time": "17:31:20"}
201
+ {"current_steps": 201, "total_steps": 1585, "loss": 0.6352, "lr": 3.9914444212069144e-05, "epoch": 0.6325727773406766, "percentage": 12.68, "elapsed_time": "2:32:31", "remaining_time": "17:30:13"}
202
+ {"current_steps": 202, "total_steps": 1585, "loss": 0.637, "lr": 3.99103247094029e-05, "epoch": 0.6357199055861527, "percentage": 12.74, "elapsed_time": "2:33:20", "remaining_time": "17:29:49"}
203
+ {"current_steps": 203, "total_steps": 1585, "loss": 0.6343, "lr": 3.990610857075527e-05, "epoch": 0.6388670338316287, "percentage": 12.81, "elapsed_time": "2:34:07", "remaining_time": "17:29:16"}
204
+ {"current_steps": 204, "total_steps": 1585, "loss": 0.6325, "lr": 3.990179581658953e-05, "epoch": 0.6420141620771046, "percentage": 12.87, "elapsed_time": "2:34:55", "remaining_time": "17:28:45"}
205
+ {"current_steps": 205, "total_steps": 1585, "loss": 0.6249, "lr": 3.98973864678379e-05, "epoch": 0.6451612903225806, "percentage": 12.93, "elapsed_time": "2:35:38", "remaining_time": "17:27:43"}
206
+ {"current_steps": 206, "total_steps": 1585, "loss": 0.6218, "lr": 3.9892880545901436e-05, "epoch": 0.6483084185680567, "percentage": 13.0, "elapsed_time": "2:36:22", "remaining_time": "17:26:46"}
207
+ {"current_steps": 207, "total_steps": 1585, "loss": 0.6367, "lr": 3.988827807264989e-05, "epoch": 0.6514555468135327, "percentage": 13.06, "elapsed_time": "2:37:07", "remaining_time": "17:25:56"}
208
+ {"current_steps": 208, "total_steps": 1585, "loss": 0.6426, "lr": 3.988357907042165e-05, "epoch": 0.6546026750590087, "percentage": 13.12, "elapsed_time": "2:37:51", "remaining_time": "17:25:02"}
209
+ {"current_steps": 209, "total_steps": 1585, "loss": 0.644, "lr": 3.9878783562023615e-05, "epoch": 0.6577498033044846, "percentage": 13.19, "elapsed_time": "2:38:39", "remaining_time": "17:24:33"}
210
+ {"current_steps": 210, "total_steps": 1585, "loss": 0.6498, "lr": 3.987389157073108e-05, "epoch": 0.6608969315499607, "percentage": 13.25, "elapsed_time": "2:39:25", "remaining_time": "17:23:48"}
211
+ {"current_steps": 211, "total_steps": 1585, "loss": 0.6318, "lr": 3.986890312028763e-05, "epoch": 0.6640440597954367, "percentage": 13.31, "elapsed_time": "2:40:12", "remaining_time": "17:23:15"}
212
+ {"current_steps": 212, "total_steps": 1585, "loss": 0.6394, "lr": 3.9863818234904996e-05, "epoch": 0.6671911880409127, "percentage": 13.38, "elapsed_time": "2:40:58", "remaining_time": "17:22:34"}
213
+ {"current_steps": 213, "total_steps": 1585, "loss": 0.6412, "lr": 3.985863693926301e-05, "epoch": 0.6703383162863886, "percentage": 13.44, "elapsed_time": "2:41:39", "remaining_time": "17:21:17"}
214
+ {"current_steps": 214, "total_steps": 1585, "loss": 0.6377, "lr": 3.9853359258509375e-05, "epoch": 0.6734854445318647, "percentage": 13.5, "elapsed_time": "2:42:20", "remaining_time": "17:20:01"}
215
+ {"current_steps": 215, "total_steps": 1585, "loss": 0.6323, "lr": 3.984798521825966e-05, "epoch": 0.6766325727773407, "percentage": 13.56, "elapsed_time": "2:43:06", "remaining_time": "17:19:21"}
216
+ {"current_steps": 216, "total_steps": 1585, "loss": 0.6411, "lr": 3.9842514844597106e-05, "epoch": 0.6797797010228167, "percentage": 13.63, "elapsed_time": "2:43:51", "remaining_time": "17:18:28"}
217
+ {"current_steps": 217, "total_steps": 1585, "loss": 0.6287, "lr": 3.983694816407248e-05, "epoch": 0.6829268292682927, "percentage": 13.69, "elapsed_time": "2:44:36", "remaining_time": "17:17:44"}
218
+ {"current_steps": 218, "total_steps": 1585, "loss": 0.6226, "lr": 3.983128520370403e-05, "epoch": 0.6860739575137687, "percentage": 13.75, "elapsed_time": "2:45:23", "remaining_time": "17:17:06"}
219
+ {"current_steps": 219, "total_steps": 1585, "loss": 0.629, "lr": 3.982552599097727e-05, "epoch": 0.6892210857592447, "percentage": 13.82, "elapsed_time": "2:46:11", "remaining_time": "17:16:37"}
220
+ {"current_steps": 220, "total_steps": 1585, "loss": 0.6337, "lr": 3.9819670553844885e-05, "epoch": 0.6923682140047207, "percentage": 13.88, "elapsed_time": "2:46:58", "remaining_time": "17:16:01"}
221
+ {"current_steps": 221, "total_steps": 1585, "loss": 0.6241, "lr": 3.981371892072661e-05, "epoch": 0.6955153422501967, "percentage": 13.94, "elapsed_time": "2:47:43", "remaining_time": "17:15:10"}
222
+ {"current_steps": 222, "total_steps": 1585, "loss": 0.6354, "lr": 3.9807671120509074e-05, "epoch": 0.6986624704956726, "percentage": 14.01, "elapsed_time": "2:48:30", "remaining_time": "17:14:33"}
223
+ {"current_steps": 223, "total_steps": 1585, "loss": 0.625, "lr": 3.9801527182545624e-05, "epoch": 0.7018095987411487, "percentage": 14.07, "elapsed_time": "2:49:15", "remaining_time": "17:13:44"}
224
+ {"current_steps": 224, "total_steps": 1585, "loss": 0.6301, "lr": 3.979528713665624e-05, "epoch": 0.7049567269866247, "percentage": 14.13, "elapsed_time": "2:50:02", "remaining_time": "17:13:11"}
225
+ {"current_steps": 225, "total_steps": 1585, "loss": 0.6292, "lr": 3.978895101312738e-05, "epoch": 0.7081038552321007, "percentage": 14.2, "elapsed_time": "2:50:47", "remaining_time": "17:12:18"}
226
+ {"current_steps": 226, "total_steps": 1585, "loss": 0.626, "lr": 3.9782518842711795e-05, "epoch": 0.7112509834775768, "percentage": 14.26, "elapsed_time": "2:51:34", "remaining_time": "17:11:42"}
227
+ {"current_steps": 227, "total_steps": 1585, "loss": 0.6246, "lr": 3.977599065662843e-05, "epoch": 0.7143981117230527, "percentage": 14.32, "elapsed_time": "2:52:19", "remaining_time": "17:10:55"}
228
+ {"current_steps": 228, "total_steps": 1585, "loss": 0.6282, "lr": 3.976936648656223e-05, "epoch": 0.7175452399685287, "percentage": 14.38, "elapsed_time": "2:53:05", "remaining_time": "17:10:12"}
229
+ {"current_steps": 229, "total_steps": 1585, "loss": 0.6271, "lr": 3.976264636466401e-05, "epoch": 0.7206923682140047, "percentage": 14.45, "elapsed_time": "2:53:49", "remaining_time": "17:09:17"}
230
+ {"current_steps": 230, "total_steps": 1585, "loss": 0.6299, "lr": 3.97558303235503e-05, "epoch": 0.7238394964594808, "percentage": 14.51, "elapsed_time": "2:54:35", "remaining_time": "17:08:33"}
231
+ {"current_steps": 231, "total_steps": 1585, "loss": 0.6273, "lr": 3.9748918396303166e-05, "epoch": 0.7269866247049567, "percentage": 14.57, "elapsed_time": "2:55:21", "remaining_time": "17:07:50"}
232
+ {"current_steps": 232, "total_steps": 1585, "loss": 0.6364, "lr": 3.974191061647007e-05, "epoch": 0.7301337529504327, "percentage": 14.64, "elapsed_time": "2:56:04", "remaining_time": "17:06:50"}
233
+ {"current_steps": 233, "total_steps": 1585, "loss": 0.6081, "lr": 3.973480701806371e-05, "epoch": 0.7332808811959087, "percentage": 14.7, "elapsed_time": "2:56:50", "remaining_time": "17:06:05"}
234
+ {"current_steps": 234, "total_steps": 1585, "loss": 0.6335, "lr": 3.972760763556183e-05, "epoch": 0.7364280094413848, "percentage": 14.76, "elapsed_time": "2:57:36", "remaining_time": "17:05:23"}
235
+ {"current_steps": 235, "total_steps": 1585, "loss": 0.6245, "lr": 3.972031250390707e-05, "epoch": 0.7395751376868608, "percentage": 14.83, "elapsed_time": "2:58:22", "remaining_time": "17:04:42"}
236
+ {"current_steps": 236, "total_steps": 1585, "loss": 0.6174, "lr": 3.97129216585068e-05, "epoch": 0.7427222659323367, "percentage": 14.89, "elapsed_time": "2:59:10", "remaining_time": "17:04:09"}
237
+ {"current_steps": 237, "total_steps": 1585, "loss": 0.6259, "lr": 3.9705435135232954e-05, "epoch": 0.7458693941778127, "percentage": 14.95, "elapsed_time": "2:59:55", "remaining_time": "17:03:23"}
238
+ {"current_steps": 238, "total_steps": 1585, "loss": 0.6156, "lr": 3.9697852970421816e-05, "epoch": 0.7490165224232888, "percentage": 15.02, "elapsed_time": "3:00:40", "remaining_time": "17:02:32"}
239
+ {"current_steps": 239, "total_steps": 1585, "loss": 0.6235, "lr": 3.96901752008739e-05, "epoch": 0.7521636506687648, "percentage": 15.08, "elapsed_time": "3:01:26", "remaining_time": "17:01:48"}
240
+ {"current_steps": 240, "total_steps": 1585, "loss": 0.6308, "lr": 3.968240186385372e-05, "epoch": 0.7553107789142408, "percentage": 15.14, "elapsed_time": "3:02:14", "remaining_time": "17:01:20"}
241
+ {"current_steps": 241, "total_steps": 1585, "loss": 0.6249, "lr": 3.967453299708965e-05, "epoch": 0.7584579071597167, "percentage": 15.21, "elapsed_time": "3:03:00", "remaining_time": "17:00:36"}
242
+ {"current_steps": 242, "total_steps": 1585, "loss": 0.6222, "lr": 3.966656863877371e-05, "epoch": 0.7616050354051928, "percentage": 15.27, "elapsed_time": "3:03:43", "remaining_time": "16:59:38"}
243
+ {"current_steps": 243, "total_steps": 1585, "loss": 0.6117, "lr": 3.965850882756141e-05, "epoch": 0.7647521636506688, "percentage": 15.33, "elapsed_time": "3:04:28", "remaining_time": "16:58:47"}
244
+ {"current_steps": 244, "total_steps": 1585, "loss": 0.6164, "lr": 3.9650353602571535e-05, "epoch": 0.7678992918961448, "percentage": 15.39, "elapsed_time": "3:05:14", "remaining_time": "16:58:03"}
245
+ {"current_steps": 245, "total_steps": 1585, "loss": 0.6322, "lr": 3.9642103003385976e-05, "epoch": 0.7710464201416207, "percentage": 15.46, "elapsed_time": "3:05:57", "remaining_time": "16:57:02"}
246
+ {"current_steps": 246, "total_steps": 1585, "loss": 0.6212, "lr": 3.963375707004951e-05, "epoch": 0.7741935483870968, "percentage": 15.52, "elapsed_time": "3:06:40", "remaining_time": "16:56:03"}
247
+ {"current_steps": 247, "total_steps": 1585, "loss": 0.6272, "lr": 3.9625315843069635e-05, "epoch": 0.7773406766325728, "percentage": 15.58, "elapsed_time": "3:07:25", "remaining_time": "16:55:14"}
248
+ {"current_steps": 248, "total_steps": 1585, "loss": 0.627, "lr": 3.9616779363416375e-05, "epoch": 0.7804878048780488, "percentage": 15.65, "elapsed_time": "3:08:13", "remaining_time": "16:54:42"}
249
+ {"current_steps": 249, "total_steps": 1585, "loss": 0.632, "lr": 3.9608147672522056e-05, "epoch": 0.7836349331235248, "percentage": 15.71, "elapsed_time": "3:08:57", "remaining_time": "16:53:48"}
250
+ {"current_steps": 250, "total_steps": 1585, "loss": 0.6334, "lr": 3.959942081228111e-05, "epoch": 0.7867820613690008, "percentage": 15.77, "elapsed_time": "3:09:43", "remaining_time": "16:53:10"}
251
+ {"current_steps": 251, "total_steps": 1585, "loss": 0.6347, "lr": 3.9590598825049896e-05, "epoch": 0.7899291896144768, "percentage": 15.84, "elapsed_time": "3:10:31", "remaining_time": "16:52:37"}
252
+ {"current_steps": 252, "total_steps": 1585, "loss": 0.6291, "lr": 3.958168175364646e-05, "epoch": 0.7930763178599528, "percentage": 15.9, "elapsed_time": "3:11:19", "remaining_time": "16:52:00"}
253
+ {"current_steps": 253, "total_steps": 1585, "loss": 0.6227, "lr": 3.9572669641350366e-05, "epoch": 0.7962234461054288, "percentage": 15.96, "elapsed_time": "3:12:02", "remaining_time": "16:51:06"}
254
+ {"current_steps": 254, "total_steps": 1585, "loss": 0.6287, "lr": 3.956356253190245e-05, "epoch": 0.7993705743509048, "percentage": 16.03, "elapsed_time": "3:12:46", "remaining_time": "16:50:08"}
255
+ {"current_steps": 255, "total_steps": 1585, "loss": 0.6349, "lr": 3.9554360469504616e-05, "epoch": 0.8025177025963808, "percentage": 16.09, "elapsed_time": "3:13:32", "remaining_time": "16:49:27"}
256
+ {"current_steps": 256, "total_steps": 1585, "loss": 0.6322, "lr": 3.9545063498819655e-05, "epoch": 0.8056648308418568, "percentage": 16.15, "elapsed_time": "3:14:18", "remaining_time": "16:48:43"}
257
+ {"current_steps": 257, "total_steps": 1585, "loss": 0.6236, "lr": 3.9535671664970976e-05, "epoch": 0.8088119590873328, "percentage": 16.21, "elapsed_time": "3:15:03", "remaining_time": "16:47:56"}
258
+ {"current_steps": 258, "total_steps": 1585, "loss": 0.6266, "lr": 3.952618501354241e-05, "epoch": 0.8119590873328089, "percentage": 16.28, "elapsed_time": "3:15:46", "remaining_time": "16:46:54"}
259
+ {"current_steps": 259, "total_steps": 1585, "loss": 0.6348, "lr": 3.951660359057802e-05, "epoch": 0.8151062155782848, "percentage": 16.34, "elapsed_time": "3:16:34", "remaining_time": "16:46:22"}
260
+ {"current_steps": 260, "total_steps": 1585, "loss": 0.6185, "lr": 3.9506927442581816e-05, "epoch": 0.8182533438237608, "percentage": 16.4, "elapsed_time": "3:17:19", "remaining_time": "16:45:35"}
261
+ {"current_steps": 261, "total_steps": 1585, "loss": 0.6275, "lr": 3.9497156616517584e-05, "epoch": 0.8214004720692368, "percentage": 16.47, "elapsed_time": "3:18:05", "remaining_time": "16:44:50"}
262
+ {"current_steps": 262, "total_steps": 1585, "loss": 0.618, "lr": 3.948729115980862e-05, "epoch": 0.8245476003147129, "percentage": 16.53, "elapsed_time": "3:18:52", "remaining_time": "16:44:14"}
263
+ {"current_steps": 263, "total_steps": 1585, "loss": 0.6261, "lr": 3.947733112033753e-05, "epoch": 0.8276947285601888, "percentage": 16.59, "elapsed_time": "3:19:38", "remaining_time": "16:43:29"}
264
+ {"current_steps": 264, "total_steps": 1585, "loss": 0.6129, "lr": 3.946727654644597e-05, "epoch": 0.8308418568056648, "percentage": 16.66, "elapsed_time": "3:20:23", "remaining_time": "16:42:41"}
265
+ {"current_steps": 265, "total_steps": 1585, "loss": 0.6156, "lr": 3.945712748693443e-05, "epoch": 0.8339889850511408, "percentage": 16.72, "elapsed_time": "3:21:10", "remaining_time": "16:42:05"}
266
+ {"current_steps": 266, "total_steps": 1585, "loss": 0.6197, "lr": 3.9446883991062e-05, "epoch": 0.8371361132966169, "percentage": 16.78, "elapsed_time": "3:21:55", "remaining_time": "16:41:14"}
267
+ {"current_steps": 267, "total_steps": 1585, "loss": 0.6061, "lr": 3.94365461085461e-05, "epoch": 0.8402832415420929, "percentage": 16.85, "elapsed_time": "3:22:41", "remaining_time": "16:40:31"}
268
+ {"current_steps": 268, "total_steps": 1585, "loss": 0.6227, "lr": 3.94261138895623e-05, "epoch": 0.8434303697875688, "percentage": 16.91, "elapsed_time": "3:23:26", "remaining_time": "16:39:46"}
269
+ {"current_steps": 269, "total_steps": 1585, "loss": 0.6323, "lr": 3.9415587384744e-05, "epoch": 0.8465774980330448, "percentage": 16.97, "elapsed_time": "3:24:09", "remaining_time": "16:38:45"}
270
+ {"current_steps": 270, "total_steps": 1585, "loss": 0.6207, "lr": 3.940496664518223e-05, "epoch": 0.8497246262785209, "percentage": 17.03, "elapsed_time": "3:24:50", "remaining_time": "16:37:38"}
271
+ {"current_steps": 271, "total_steps": 1585, "loss": 0.6068, "lr": 3.939425172242541e-05, "epoch": 0.8528717545239969, "percentage": 17.1, "elapsed_time": "3:25:34", "remaining_time": "16:36:46"}
272
+ {"current_steps": 272, "total_steps": 1585, "loss": 0.6194, "lr": 3.9383442668479074e-05, "epoch": 0.8560188827694729, "percentage": 17.16, "elapsed_time": "3:26:19", "remaining_time": "16:35:59"}
273
+ {"current_steps": 273, "total_steps": 1585, "loss": 0.621, "lr": 3.937253953580562e-05, "epoch": 0.8591660110149488, "percentage": 17.22, "elapsed_time": "3:27:03", "remaining_time": "16:35:06"}
274
+ {"current_steps": 274, "total_steps": 1585, "loss": 0.6143, "lr": 3.936154237732409e-05, "epoch": 0.8623131392604249, "percentage": 17.29, "elapsed_time": "3:27:52", "remaining_time": "16:34:36"}
275
+ {"current_steps": 275, "total_steps": 1585, "loss": 0.6128, "lr": 3.935045124640985e-05, "epoch": 0.8654602675059009, "percentage": 17.35, "elapsed_time": "3:28:37", "remaining_time": "16:33:51"}
276
+ {"current_steps": 276, "total_steps": 1585, "loss": 0.6227, "lr": 3.933926619689438e-05, "epoch": 0.8686073957513769, "percentage": 17.41, "elapsed_time": "3:29:23", "remaining_time": "16:33:07"}
277
+ {"current_steps": 277, "total_steps": 1585, "loss": 0.6166, "lr": 3.932798728306502e-05, "epoch": 0.8717545239968528, "percentage": 17.48, "elapsed_time": "3:30:04", "remaining_time": "16:31:58"}
278
+ {"current_steps": 278, "total_steps": 1585, "loss": 0.6093, "lr": 3.931661455966465e-05, "epoch": 0.8749016522423289, "percentage": 17.54, "elapsed_time": "3:30:49", "remaining_time": "16:31:11"}
279
+ {"current_steps": 279, "total_steps": 1585, "loss": 0.6155, "lr": 3.930514808189149e-05, "epoch": 0.8780487804878049, "percentage": 17.6, "elapsed_time": "3:31:34", "remaining_time": "16:30:22"}
280
+ {"current_steps": 280, "total_steps": 1585, "loss": 0.6253, "lr": 3.929358790539881e-05, "epoch": 0.8811959087332809, "percentage": 17.67, "elapsed_time": "3:32:19", "remaining_time": "16:29:35"}
281
+ {"current_steps": 281, "total_steps": 1585, "loss": 0.6239, "lr": 3.92819340862946e-05, "epoch": 0.8843430369787569, "percentage": 17.73, "elapsed_time": "3:33:05", "remaining_time": "16:28:53"}
282
+ {"current_steps": 282, "total_steps": 1585, "loss": 0.6211, "lr": 3.927018668114141e-05, "epoch": 0.8874901652242329, "percentage": 17.79, "elapsed_time": "3:33:50", "remaining_time": "16:28:04"}
283
+ {"current_steps": 283, "total_steps": 1585, "loss": 0.6182, "lr": 3.925834574695599e-05, "epoch": 0.8906372934697089, "percentage": 17.85, "elapsed_time": "3:34:36", "remaining_time": "16:27:21"}
284
+ {"current_steps": 284, "total_steps": 1585, "loss": 0.6221, "lr": 3.924641134120903e-05, "epoch": 0.8937844217151849, "percentage": 17.92, "elapsed_time": "3:35:21", "remaining_time": "16:26:33"}
285
+ {"current_steps": 285, "total_steps": 1585, "loss": 0.6161, "lr": 3.9234383521824905e-05, "epoch": 0.8969315499606609, "percentage": 17.98, "elapsed_time": "3:36:08", "remaining_time": "16:25:52"}
286
+ {"current_steps": 286, "total_steps": 1585, "loss": 0.6148, "lr": 3.922226234718137e-05, "epoch": 0.9000786782061369, "percentage": 18.04, "elapsed_time": "3:36:48", "remaining_time": "16:24:45"}
287
+ {"current_steps": 287, "total_steps": 1585, "loss": 0.6138, "lr": 3.92100478761093e-05, "epoch": 0.9032258064516129, "percentage": 18.11, "elapsed_time": "3:37:36", "remaining_time": "16:24:11"}
288
+ {"current_steps": 288, "total_steps": 1585, "loss": 0.621, "lr": 3.919774016789237e-05, "epoch": 0.9063729346970889, "percentage": 18.17, "elapsed_time": "3:38:21", "remaining_time": "16:23:22"}
289
+ {"current_steps": 289, "total_steps": 1585, "loss": 0.6156, "lr": 3.918533928226679e-05, "epoch": 0.9095200629425649, "percentage": 18.23, "elapsed_time": "3:39:03", "remaining_time": "16:22:23"}
290
+ {"current_steps": 290, "total_steps": 1585, "loss": 0.6167, "lr": 3.917284527942103e-05, "epoch": 0.912667191188041, "percentage": 18.3, "elapsed_time": "3:39:50", "remaining_time": "16:21:40"}
291
+ {"current_steps": 291, "total_steps": 1585, "loss": 0.6161, "lr": 3.91602582199955e-05, "epoch": 0.9158143194335169, "percentage": 18.36, "elapsed_time": "3:40:37", "remaining_time": "16:21:02"}
292
+ {"current_steps": 292, "total_steps": 1585, "loss": 0.614, "lr": 3.914757816508225e-05, "epoch": 0.9189614476789929, "percentage": 18.42, "elapsed_time": "3:41:23", "remaining_time": "16:20:18"}
293
+ {"current_steps": 293, "total_steps": 1585, "loss": 0.6208, "lr": 3.913480517622472e-05, "epoch": 0.9221085759244689, "percentage": 18.49, "elapsed_time": "3:42:09", "remaining_time": "16:19:35"}
294
+ {"current_steps": 294, "total_steps": 1585, "loss": 0.6286, "lr": 3.9121939315417386e-05, "epoch": 0.925255704169945, "percentage": 18.55, "elapsed_time": "3:42:53", "remaining_time": "16:18:44"}
295
+ {"current_steps": 295, "total_steps": 1585, "loss": 0.6218, "lr": 3.910898064510549e-05, "epoch": 0.9284028324154209, "percentage": 18.61, "elapsed_time": "3:43:38", "remaining_time": "16:17:56"}
296
+ {"current_steps": 296, "total_steps": 1585, "loss": 0.621, "lr": 3.909592922818474e-05, "epoch": 0.9315499606608969, "percentage": 18.68, "elapsed_time": "3:44:24", "remaining_time": "16:17:15"}
297
+ {"current_steps": 297, "total_steps": 1585, "loss": 0.6215, "lr": 3.908278512800098e-05, "epoch": 0.9346970889063729, "percentage": 18.74, "elapsed_time": "3:45:10", "remaining_time": "16:16:30"}
298
+ {"current_steps": 298, "total_steps": 1585, "loss": 0.6214, "lr": 3.906954840834991e-05, "epoch": 0.937844217151849, "percentage": 18.8, "elapsed_time": "3:45:52", "remaining_time": "16:15:31"}
299
+ {"current_steps": 299, "total_steps": 1585, "loss": 0.6199, "lr": 3.9056219133476766e-05, "epoch": 0.940991345397325, "percentage": 18.86, "elapsed_time": "3:46:40", "remaining_time": "16:14:56"}
300
+ {"current_steps": 300, "total_steps": 1585, "loss": 0.6104, "lr": 3.904279736807599e-05, "epoch": 0.9441384736428009, "percentage": 18.93, "elapsed_time": "3:47:22", "remaining_time": "16:13:56"}
301
+ {"current_steps": 301, "total_steps": 1585, "loss": 0.6081, "lr": 3.9029283177290944e-05, "epoch": 0.9472856018882769, "percentage": 18.99, "elapsed_time": "3:48:09", "remaining_time": "16:13:14"}
302
+ {"current_steps": 302, "total_steps": 1585, "loss": 0.6182, "lr": 3.901567662671359e-05, "epoch": 0.950432730133753, "percentage": 19.05, "elapsed_time": "3:48:56", "remaining_time": "16:12:37"}
303
+ {"current_steps": 303, "total_steps": 1585, "loss": 0.6272, "lr": 3.9001977782384154e-05, "epoch": 0.953579858379229, "percentage": 19.12, "elapsed_time": "3:49:42", "remaining_time": "16:11:52"}
304
+ {"current_steps": 304, "total_steps": 1585, "loss": 0.6193, "lr": 3.898818671079081e-05, "epoch": 0.956726986624705, "percentage": 19.18, "elapsed_time": "3:50:29", "remaining_time": "16:11:15"}
305
+ {"current_steps": 305, "total_steps": 1585, "loss": 0.623, "lr": 3.897430347886937e-05, "epoch": 0.9598741148701809, "percentage": 19.24, "elapsed_time": "3:51:14", "remaining_time": "16:10:26"}
306
+ {"current_steps": 306, "total_steps": 1585, "loss": 0.6061, "lr": 3.896032815400295e-05, "epoch": 0.963021243115657, "percentage": 19.31, "elapsed_time": "3:51:59", "remaining_time": "16:09:39"}
307
+ {"current_steps": 307, "total_steps": 1585, "loss": 0.6158, "lr": 3.894626080402166e-05, "epoch": 0.966168371361133, "percentage": 19.37, "elapsed_time": "3:52:46", "remaining_time": "16:09:01"}
308
+ {"current_steps": 308, "total_steps": 1585, "loss": 0.6135, "lr": 3.893210149720222e-05, "epoch": 0.969315499606609, "percentage": 19.43, "elapsed_time": "3:53:33", "remaining_time": "16:08:22"}
309
+ {"current_steps": 309, "total_steps": 1585, "loss": 0.6214, "lr": 3.8917850302267724e-05, "epoch": 0.9724626278520849, "percentage": 19.5, "elapsed_time": "3:54:20", "remaining_time": "16:07:41"}
310
+ {"current_steps": 310, "total_steps": 1585, "loss": 0.6098, "lr": 3.890350728838719e-05, "epoch": 0.975609756097561, "percentage": 19.56, "elapsed_time": "3:55:06", "remaining_time": "16:06:57"}
311
+ {"current_steps": 311, "total_steps": 1585, "loss": 0.6078, "lr": 3.888907252517534e-05, "epoch": 0.978756884343037, "percentage": 19.62, "elapsed_time": "3:55:53", "remaining_time": "16:06:17"}
312
+ {"current_steps": 312, "total_steps": 1585, "loss": 0.6148, "lr": 3.887454608269217e-05, "epoch": 0.981904012588513, "percentage": 19.68, "elapsed_time": "3:56:38", "remaining_time": "16:05:32"}
313
+ {"current_steps": 313, "total_steps": 1585, "loss": 0.6283, "lr": 3.885992803144266e-05, "epoch": 0.985051140833989, "percentage": 19.75, "elapsed_time": "3:57:22", "remaining_time": "16:04:39"}
314
+ {"current_steps": 314, "total_steps": 1585, "loss": 0.6179, "lr": 3.8845218442376416e-05, "epoch": 0.988198269079465, "percentage": 19.81, "elapsed_time": "3:58:08", "remaining_time": "16:03:55"}
315
+ {"current_steps": 315, "total_steps": 1585, "loss": 0.6138, "lr": 3.883041738688733e-05, "epoch": 0.991345397324941, "percentage": 19.87, "elapsed_time": "3:58:56", "remaining_time": "16:03:21"}
316
+ {"current_steps": 316, "total_steps": 1585, "loss": 0.6024, "lr": 3.8815524936813236e-05, "epoch": 0.994492525570417, "percentage": 19.94, "elapsed_time": "3:59:38", "remaining_time": "16:02:22"}
317
+ {"current_steps": 317, "total_steps": 1585, "loss": 0.6101, "lr": 3.880054116443556e-05, "epoch": 0.997639653815893, "percentage": 20.0, "elapsed_time": "4:00:22", "remaining_time": "16:01:29"}
318
+ {"current_steps": 318, "total_steps": 1585, "loss": 1.027, "lr": 3.878546614247894e-05, "epoch": 1.002360346184107, "percentage": 20.06, "elapsed_time": "4:01:51", "remaining_time": "16:03:37"}
319
+ {"current_steps": 319, "total_steps": 1585, "loss": 0.5977, "lr": 3.8770299944110934e-05, "epoch": 1.005507474429583, "percentage": 20.13, "elapsed_time": "4:02:37", "remaining_time": "16:02:54"}
320
+ {"current_steps": 320, "total_steps": 1585, "loss": 0.5814, "lr": 3.875504264294161e-05, "epoch": 1.008654602675059, "percentage": 20.19, "elapsed_time": "4:03:25", "remaining_time": "16:02:15"}
321
+ {"current_steps": 321, "total_steps": 1585, "loss": 0.5838, "lr": 3.873969431302322e-05, "epoch": 1.011801730920535, "percentage": 20.25, "elapsed_time": "4:04:08", "remaining_time": "16:01:19"}
training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3c3e09e3487aeae4470ab4840ae48b4cf23121c4ecc0a42eea5737ade6c77a55
3
+ size 7160
vocab.json ADDED
The diff for this file is too large to render. See raw diff