Training in progress, step 200
- .gitattributes +1 -0
- added_tokens.json +24 -0
- config.json +33 -0
- merges.txt +0 -0
- model-00001-of-00004.safetensors +3 -0
- model-00002-of-00004.safetensors +3 -0
- model-00003-of-00004.safetensors +3 -0
- model-00004-of-00004.safetensors +3 -0
- model.safetensors.index.json +346 -0
- special_tokens_map.json +31 -0
- tokenizer.json +3 -0
- tokenizer_config.json +209 -0
- trainer_log.jsonl +203 -0
- training_args.bin +3 -0
- vocab.json +0 -0
.gitattributes
CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json
ADDED
@@ -0,0 +1,24 @@
+{
+  "</tool_call>": 151658,
+  "<tool_call>": 151657,
+  "<|box_end|>": 151649,
+  "<|box_start|>": 151648,
+  "<|endoftext|>": 151643,
+  "<|file_sep|>": 151664,
+  "<|fim_middle|>": 151660,
+  "<|fim_pad|>": 151662,
+  "<|fim_prefix|>": 151659,
+  "<|fim_suffix|>": 151661,
+  "<|im_end|>": 151645,
+  "<|im_start|>": 151644,
+  "<|image_pad|>": 151655,
+  "<|object_ref_end|>": 151647,
+  "<|object_ref_start|>": 151646,
+  "<|quad_end|>": 151651,
+  "<|quad_start|>": 151650,
+  "<|repo_name|>": 151663,
+  "<|video_pad|>": 151656,
+  "<|vision_end|>": 151653,
+  "<|vision_pad|>": 151654,
+  "<|vision_start|>": 151652
+}
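
The map above pins each added token to a fixed vocabulary id, and the tokenizer files shipped in the same commit must agree with it. A minimal cross-check sketch (the repository id is a placeholder, not the real checkpoint name):

import json

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("your-org/your-checkpoint")  # placeholder id

with open("added_tokens.json") as f:
    added_tokens = json.load(f)

# Every added token should resolve to the id recorded in the file.
for token, expected_id in added_tokens.items():
    assert tokenizer.convert_tokens_to_ids(token) == expected_id, token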
config.json
ADDED
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "Qwen2ForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "eos_token_id": 151645,
+  "hidden_act": "silu",
+  "hidden_size": 3584,
+  "initializer_range": 0.02,
+  "intermediate_size": 18944,
+  "max_position_embeddings": 32768,
+  "max_window_layers": 28,
+  "model_type": "qwen2",
+  "num_attention_heads": 28,
+  "num_hidden_layers": 28,
+  "num_key_value_heads": 4,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": {
+    "factor": 4.0,
+    "original_max_position_embeddings": 32768,
+    "rope_type": "yarn",
+    "type": "yarn"
+  },
+  "rope_theta": 1000000.0,
+  "sliding_window": 131072,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.51.3",
+  "use_cache": false,
+  "use_sliding_window": false,
+  "vocab_size": 152064
+}
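
Note the "rope_scaling" block: YaRN scaling with factor 4.0 over 32768 original positions implies an effective context window of 4.0 x 32768 = 131072 tokens, matching the "sliding_window" value (though "use_sliding_window" is false). A minimal sketch deriving this directly from the file:

import json

with open("config.json") as f:
    config = json.load(f)

scaling = config["rope_scaling"]
effective_context = int(scaling["factor"] * scaling["original_max_position_embeddings"])
print(effective_context)  # 4.0 * 32768 = 131072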
merges.txt
ADDED
The diff for this file is too large to render.
model-00001-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b6e5d217b3b2fa5af7e85b82b192428c72dd12c54db827c31cb1a3bfb88639ff
+size 4877660776
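
The shard is stored through Git LFS, so the repository tracks only this three-line pointer; the oid is the SHA-256 of the real file. A sketch, assuming the shard has been downloaded alongside the pointer, that re-derives the hash:

import hashlib

def sha256_of(path, chunk_size=1 << 20):
    # Stream the file so multi-GB shards never have to fit in memory.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

expected = "b6e5d217b3b2fa5af7e85b82b192428c72dd12c54db827c31cb1a3bfb88639ff"
assert sha256_of("model-00001-of-00004.safetensors") == expected

The remaining three shard pointers below follow the same format.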
model-00002-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aeda19e3d530085e3d61786b7712b9c6d263e4f152135cffaf00c3460433f2b8
+size 4932751008
model-00003-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:40d1652a90a37a769062f637331ec804e9c25334c24b0d31242ee874459ea054
+size 4330865200
model-00004-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0d6aea6e554a17f23556d62540c09bcf57b44366e152661637837aa888a22f17
+size 1089994880
model.safetensors.index.json
ADDED
@@ -0,0 +1,346 @@
+{
+  "metadata": {
+    "total_size": 15231233024
+  },
+  "weight_map": {
+    "lm_head.weight": "model-00004-of-00004.safetensors",
+    "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.norm.weight": "model-00003-of-00004.safetensors"
+  }
+}
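
The index's weight_map tells a loader which of the four shards holds each tensor, so a single weight can be read without opening the other files. A sketch, assuming the shards are downloaded locally (the tensor name is one entry from the map above):

import json

from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.18.mlp.gate_proj.weight"
shard = index["weight_map"][name]  # "model-00002-of-00004.safetensors"

with safe_open(shard, framework="pt") as f:
    tensor = f.get_tensor(name)

# From config.json, gate_proj should be (intermediate_size, hidden_size),
# i.e. (18944, 3584).
print(tensor.shape)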
special_tokens_map.json
ADDED
@@ -0,0 +1,31 @@
+{
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "eos_token": {
+    "content": "<|im_end|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
tokenizer.json
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
+size 11421896
tokenizer_config.json
ADDED
@@ -0,0 +1,209 @@
+{
+  "add_bos_token": false,
+  "add_prefix_space": false,
+  "added_tokens_decoder": {
+    "151643": {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151644": {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151645": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151646": {
+      "content": "<|object_ref_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151647": {
+      "content": "<|object_ref_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151648": {
+      "content": "<|box_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151649": {
+      "content": "<|box_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151650": {
+      "content": "<|quad_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151651": {
+      "content": "<|quad_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151652": {
+      "content": "<|vision_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151653": {
+      "content": "<|vision_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151654": {
+      "content": "<|vision_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151655": {
+      "content": "<|image_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151656": {
+      "content": "<|video_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151657": {
+      "content": "<tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151658": {
+      "content": "</tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151659": {
+      "content": "<|fim_prefix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151660": {
+      "content": "<|fim_middle|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151661": {
+      "content": "<|fim_suffix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151662": {
+      "content": "<|fim_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151663": {
+      "content": "<|repo_name|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151664": {
+      "content": "<|file_sep|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    }
+  },
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "bos_token": null,
+  "chat_template": "{%- if tools %}\n {{- '<|im_start|>system\\n' }}\n {%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n {%- else %}\n {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}\n {%- endif %}\n {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n {%- if messages[0]['role'] == 'system' %}\n {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n {%- else %}\n {{- '<|im_start|>system\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\n' }}\n {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role }}\n {%- if message.content %}\n {{- '\\n' + message.content }}\n {%- endif %}\n {%- for tool_call in message.tool_calls %}\n {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '\\n<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n {{- '<|im_start|>user' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- message.content }}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>\\n' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "<|im_end|>",
+  "errors": "replace",
+  "extra_special_tokens": {},
+  "model_max_length": 16384,
+  "pad_token": "<|endoftext|>",
+  "padding_side": "right",
+  "split_special_tokens": false,
+  "tokenizer_class": "Qwen2Tokenizer",
+  "unk_token": null
+}
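
The "chat_template" field encodes the ChatML format (<|im_start|>role ... <|im_end|>) plus the tool-calling protocol, and transformers applies it via apply_chat_template. A sketch with a placeholder repository id:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("your-org/your-checkpoint")  # placeholder id

messages = [{"role": "user", "content": "Hello!"}]
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
# Expected rendering (no tools and no system message supplied):
# <|im_start|>system
# You are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>
# <|im_start|>user
# Hello!<|im_end|>
# <|im_start|>assistant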
trainer_log.jsonl
ADDED
@@ -0,0 +1,203 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{"current_steps": 1, "total_steps": 426, "loss": 0.8248, "lr": 0.0, "epoch": 0.00702576112412178, "percentage": 0.23, "elapsed_time": "0:01:09", "remaining_time": "8:14:16"}
|
2 |
+
{"current_steps": 2, "total_steps": 426, "loss": 0.7537, "lr": 2.3255813953488374e-07, "epoch": 0.01405152224824356, "percentage": 0.47, "elapsed_time": "0:02:06", "remaining_time": "7:26:57"}
|
3 |
+
{"current_steps": 3, "total_steps": 426, "loss": 0.7967, "lr": 4.651162790697675e-07, "epoch": 0.02107728337236534, "percentage": 0.7, "elapsed_time": "0:03:07", "remaining_time": "7:20:05"}
|
4 |
+
{"current_steps": 4, "total_steps": 426, "loss": 0.8444, "lr": 6.976744186046513e-07, "epoch": 0.02810304449648712, "percentage": 0.94, "elapsed_time": "0:04:07", "remaining_time": "7:15:03"}
|
5 |
+
{"current_steps": 5, "total_steps": 426, "loss": 0.7037, "lr": 9.30232558139535e-07, "epoch": 0.0351288056206089, "percentage": 1.17, "elapsed_time": "0:05:12", "remaining_time": "7:18:44"}
|
6 |
+
{"current_steps": 6, "total_steps": 426, "loss": 0.8214, "lr": 1.1627906976744188e-06, "epoch": 0.04215456674473068, "percentage": 1.41, "elapsed_time": "0:06:12", "remaining_time": "7:14:39"}
|
7 |
+
{"current_steps": 7, "total_steps": 426, "loss": 0.8083, "lr": 1.3953488372093025e-06, "epoch": 0.04918032786885246, "percentage": 1.64, "elapsed_time": "0:07:11", "remaining_time": "7:10:44"}
|
8 |
+
{"current_steps": 8, "total_steps": 426, "loss": 0.7147, "lr": 1.6279069767441862e-06, "epoch": 0.05620608899297424, "percentage": 1.88, "elapsed_time": "0:08:12", "remaining_time": "7:08:59"}
|
9 |
+
{"current_steps": 9, "total_steps": 426, "loss": 0.7483, "lr": 1.86046511627907e-06, "epoch": 0.06323185011709602, "percentage": 2.11, "elapsed_time": "0:09:15", "remaining_time": "7:08:53"}
|
10 |
+
{"current_steps": 10, "total_steps": 426, "loss": 0.7233, "lr": 2.0930232558139536e-06, "epoch": 0.0702576112412178, "percentage": 2.35, "elapsed_time": "0:10:13", "remaining_time": "7:05:26"}
|
11 |
+
{"current_steps": 11, "total_steps": 426, "loss": 0.7878, "lr": 2.3255813953488376e-06, "epoch": 0.07728337236533958, "percentage": 2.58, "elapsed_time": "0:11:10", "remaining_time": "7:01:45"}
|
12 |
+
{"current_steps": 12, "total_steps": 426, "loss": 0.76, "lr": 2.558139534883721e-06, "epoch": 0.08430913348946135, "percentage": 2.82, "elapsed_time": "0:12:10", "remaining_time": "7:00:06"}
|
13 |
+
{"current_steps": 13, "total_steps": 426, "loss": 0.731, "lr": 2.790697674418605e-06, "epoch": 0.09133489461358314, "percentage": 3.05, "elapsed_time": "0:13:12", "remaining_time": "6:59:35"}
|
14 |
+
{"current_steps": 14, "total_steps": 426, "loss": 0.7416, "lr": 3.0232558139534885e-06, "epoch": 0.09836065573770492, "percentage": 3.29, "elapsed_time": "0:14:14", "remaining_time": "6:59:04"}
|
15 |
+
{"current_steps": 15, "total_steps": 426, "loss": 0.6906, "lr": 3.2558139534883724e-06, "epoch": 0.1053864168618267, "percentage": 3.52, "elapsed_time": "0:15:12", "remaining_time": "6:56:46"}
|
16 |
+
{"current_steps": 16, "total_steps": 426, "loss": 0.6636, "lr": 3.4883720930232564e-06, "epoch": 0.11241217798594848, "percentage": 3.76, "elapsed_time": "0:16:10", "remaining_time": "6:54:16"}
|
17 |
+
{"current_steps": 17, "total_steps": 426, "loss": 0.6339, "lr": 3.72093023255814e-06, "epoch": 0.11943793911007025, "percentage": 3.99, "elapsed_time": "0:17:08", "remaining_time": "6:52:25"}
|
18 |
+
{"current_steps": 18, "total_steps": 426, "loss": 0.7124, "lr": 3.953488372093024e-06, "epoch": 0.12646370023419204, "percentage": 4.23, "elapsed_time": "0:18:08", "remaining_time": "6:51:12"}
|
19 |
+
{"current_steps": 19, "total_steps": 426, "loss": 0.6496, "lr": 4.186046511627907e-06, "epoch": 0.13348946135831383, "percentage": 4.46, "elapsed_time": "0:19:09", "remaining_time": "6:50:19"}
|
20 |
+
{"current_steps": 20, "total_steps": 426, "loss": 0.7656, "lr": 4.418604651162791e-06, "epoch": 0.1405152224824356, "percentage": 4.69, "elapsed_time": "0:20:12", "remaining_time": "6:50:08"}
|
21 |
+
{"current_steps": 21, "total_steps": 426, "loss": 0.6252, "lr": 4.651162790697675e-06, "epoch": 0.14754098360655737, "percentage": 4.93, "elapsed_time": "0:21:08", "remaining_time": "6:47:46"}
|
22 |
+
{"current_steps": 22, "total_steps": 426, "loss": 0.646, "lr": 4.883720930232559e-06, "epoch": 0.15456674473067916, "percentage": 5.16, "elapsed_time": "0:22:02", "remaining_time": "6:44:38"}
|
23 |
+
{"current_steps": 23, "total_steps": 426, "loss": 0.7369, "lr": 5.116279069767442e-06, "epoch": 0.16159250585480095, "percentage": 5.4, "elapsed_time": "0:23:01", "remaining_time": "6:43:20"}
|
24 |
+
{"current_steps": 24, "total_steps": 426, "loss": 0.6095, "lr": 5.348837209302326e-06, "epoch": 0.1686182669789227, "percentage": 5.63, "elapsed_time": "0:23:59", "remaining_time": "6:41:59"}
|
25 |
+
{"current_steps": 25, "total_steps": 426, "loss": 0.6875, "lr": 5.58139534883721e-06, "epoch": 0.1756440281030445, "percentage": 5.87, "elapsed_time": "0:25:04", "remaining_time": "6:42:04"}
|
26 |
+
{"current_steps": 26, "total_steps": 426, "loss": 0.614, "lr": 5.8139534883720935e-06, "epoch": 0.18266978922716628, "percentage": 6.1, "elapsed_time": "0:26:00", "remaining_time": "6:40:09"}
|
27 |
+
{"current_steps": 27, "total_steps": 426, "loss": 0.6459, "lr": 6.046511627906977e-06, "epoch": 0.18969555035128804, "percentage": 6.34, "elapsed_time": "0:26:58", "remaining_time": "6:38:43"}
|
28 |
+
{"current_steps": 28, "total_steps": 426, "loss": 0.5888, "lr": 6.279069767441861e-06, "epoch": 0.19672131147540983, "percentage": 6.57, "elapsed_time": "0:27:56", "remaining_time": "6:37:06"}
|
29 |
+
{"current_steps": 29, "total_steps": 426, "loss": 0.6198, "lr": 6.511627906976745e-06, "epoch": 0.20374707259953162, "percentage": 6.81, "elapsed_time": "0:28:53", "remaining_time": "6:35:30"}
|
30 |
+
{"current_steps": 30, "total_steps": 426, "loss": 0.6236, "lr": 6.744186046511628e-06, "epoch": 0.2107728337236534, "percentage": 7.04, "elapsed_time": "0:29:50", "remaining_time": "6:33:48"}
|
31 |
+
{"current_steps": 31, "total_steps": 426, "loss": 0.6386, "lr": 6.976744186046513e-06, "epoch": 0.21779859484777517, "percentage": 7.28, "elapsed_time": "0:30:47", "remaining_time": "6:32:21"}
|
32 |
+
{"current_steps": 32, "total_steps": 426, "loss": 0.5525, "lr": 7.209302325581395e-06, "epoch": 0.22482435597189696, "percentage": 7.51, "elapsed_time": "0:31:45", "remaining_time": "6:30:58"}
|
33 |
+
{"current_steps": 33, "total_steps": 426, "loss": 0.616, "lr": 7.44186046511628e-06, "epoch": 0.23185011709601874, "percentage": 7.75, "elapsed_time": "0:32:47", "remaining_time": "6:30:30"}
|
34 |
+
{"current_steps": 34, "total_steps": 426, "loss": 0.6507, "lr": 7.674418604651164e-06, "epoch": 0.2388758782201405, "percentage": 7.98, "elapsed_time": "0:33:46", "remaining_time": "6:29:24"}
|
35 |
+
{"current_steps": 35, "total_steps": 426, "loss": 0.6007, "lr": 7.906976744186048e-06, "epoch": 0.2459016393442623, "percentage": 8.22, "elapsed_time": "0:34:44", "remaining_time": "6:28:05"}
|
36 |
+
{"current_steps": 36, "total_steps": 426, "loss": 0.5901, "lr": 8.139534883720931e-06, "epoch": 0.2529274004683841, "percentage": 8.45, "elapsed_time": "0:35:40", "remaining_time": "6:26:30"}
|
37 |
+
{"current_steps": 37, "total_steps": 426, "loss": 0.6052, "lr": 8.372093023255815e-06, "epoch": 0.25995316159250587, "percentage": 8.69, "elapsed_time": "0:36:41", "remaining_time": "6:25:47"}
|
38 |
+
{"current_steps": 38, "total_steps": 426, "loss": 0.5328, "lr": 8.604651162790698e-06, "epoch": 0.26697892271662765, "percentage": 8.92, "elapsed_time": "0:37:44", "remaining_time": "6:25:24"}
|
39 |
+
{"current_steps": 39, "total_steps": 426, "loss": 0.5395, "lr": 8.837209302325582e-06, "epoch": 0.27400468384074944, "percentage": 9.15, "elapsed_time": "0:38:49", "remaining_time": "6:25:19"}
|
40 |
+
{"current_steps": 40, "total_steps": 426, "loss": 0.5548, "lr": 9.069767441860465e-06, "epoch": 0.2810304449648712, "percentage": 9.39, "elapsed_time": "0:39:51", "remaining_time": "6:24:35"}
|
41 |
+
{"current_steps": 41, "total_steps": 426, "loss": 0.5724, "lr": 9.30232558139535e-06, "epoch": 0.28805620608899296, "percentage": 9.62, "elapsed_time": "0:40:51", "remaining_time": "6:23:36"}
|
42 |
+
{"current_steps": 42, "total_steps": 426, "loss": 0.5813, "lr": 9.534883720930234e-06, "epoch": 0.29508196721311475, "percentage": 9.86, "elapsed_time": "0:41:58", "remaining_time": "6:23:46"}
|
43 |
+
{"current_steps": 43, "total_steps": 426, "loss": 0.5982, "lr": 9.767441860465117e-06, "epoch": 0.30210772833723654, "percentage": 10.09, "elapsed_time": "0:42:57", "remaining_time": "6:22:41"}
|
44 |
+
{"current_steps": 44, "total_steps": 426, "loss": 0.5235, "lr": 1e-05, "epoch": 0.3091334894613583, "percentage": 10.33, "elapsed_time": "0:43:56", "remaining_time": "6:21:27"}
|
45 |
+
{"current_steps": 45, "total_steps": 426, "loss": 0.5212, "lr": 9.99983179466314e-06, "epoch": 0.3161592505854801, "percentage": 10.56, "elapsed_time": "0:45:03", "remaining_time": "6:21:27"}
|
46 |
+
{"current_steps": 46, "total_steps": 426, "loss": 0.5614, "lr": 9.999327189969768e-06, "epoch": 0.3231850117096019, "percentage": 10.8, "elapsed_time": "0:46:04", "remaining_time": "6:20:37"}
|
47 |
+
{"current_steps": 47, "total_steps": 426, "loss": 0.6111, "lr": 9.998486219870769e-06, "epoch": 0.33021077283372363, "percentage": 11.03, "elapsed_time": "0:47:04", "remaining_time": "6:19:39"}
|
48 |
+
{"current_steps": 48, "total_steps": 426, "loss": 0.5876, "lr": 9.997308940948405e-06, "epoch": 0.3372365339578454, "percentage": 11.27, "elapsed_time": "0:48:03", "remaining_time": "6:18:26"}
|
49 |
+
{"current_steps": 49, "total_steps": 426, "loss": 0.5635, "lr": 9.995795432412513e-06, "epoch": 0.3442622950819672, "percentage": 11.5, "elapsed_time": "0:49:02", "remaining_time": "6:17:16"}
|
50 |
+
{"current_steps": 50, "total_steps": 426, "loss": 0.516, "lr": 9.993945796095183e-06, "epoch": 0.351288056206089, "percentage": 11.74, "elapsed_time": "0:49:59", "remaining_time": "6:15:54"}
|
51 |
+
{"current_steps": 51, "total_steps": 426, "loss": 0.6481, "lr": 9.991760156443892e-06, "epoch": 0.3583138173302108, "percentage": 11.97, "elapsed_time": "0:50:55", "remaining_time": "6:14:23"}
|
52 |
+
{"current_steps": 52, "total_steps": 426, "loss": 0.5661, "lr": 9.989238660513141e-06, "epoch": 0.36533957845433257, "percentage": 12.21, "elapsed_time": "0:51:50", "remaining_time": "6:12:53"}
|
53 |
+
{"current_steps": 53, "total_steps": 426, "loss": 0.5359, "lr": 9.98638147795456e-06, "epoch": 0.37236533957845436, "percentage": 12.44, "elapsed_time": "0:52:52", "remaining_time": "6:12:08"}
|
54 |
+
{"current_steps": 54, "total_steps": 426, "loss": 0.5787, "lr": 9.983188801005492e-06, "epoch": 0.3793911007025761, "percentage": 12.68, "elapsed_time": "0:53:50", "remaining_time": "6:10:56"}
{"current_steps": 55, "total_steps": 426, "loss": 0.5941, "lr": 9.979660844476056e-06, "epoch": 0.3864168618266979, "percentage": 12.91, "elapsed_time": "0:54:47", "remaining_time": "6:09:38"}
{"current_steps": 56, "total_steps": 426, "loss": 0.5344, "lr": 9.975797845734699e-06, "epoch": 0.39344262295081966, "percentage": 13.15, "elapsed_time": "0:55:44", "remaining_time": "6:08:19"}
{"current_steps": 57, "total_steps": 426, "loss": 0.5526, "lr": 9.971600064692222e-06, "epoch": 0.40046838407494145, "percentage": 13.38, "elapsed_time": "0:56:47", "remaining_time": "6:07:37"}
{"current_steps": 58, "total_steps": 426, "loss": 0.5758, "lr": 9.967067783784297e-06, "epoch": 0.40749414519906324, "percentage": 13.62, "elapsed_time": "0:57:47", "remaining_time": "6:06:40"}
{"current_steps": 59, "total_steps": 426, "loss": 0.5689, "lr": 9.962201307952455e-06, "epoch": 0.41451990632318503, "percentage": 13.85, "elapsed_time": "0:58:44", "remaining_time": "6:05:23"}
{"current_steps": 60, "total_steps": 426, "loss": 0.5275, "lr": 9.957000964623585e-06, "epoch": 0.4215456674473068, "percentage": 14.08, "elapsed_time": "0:59:42", "remaining_time": "6:04:13"}
{"current_steps": 61, "total_steps": 426, "loss": 0.5131, "lr": 9.951467103687879e-06, "epoch": 0.42857142857142855, "percentage": 14.32, "elapsed_time": "1:00:40", "remaining_time": "6:03:00"}
{"current_steps": 62, "total_steps": 426, "loss": 0.5849, "lr": 9.945600097475322e-06, "epoch": 0.43559718969555034, "percentage": 14.55, "elapsed_time": "1:01:34", "remaining_time": "6:01:31"}
{"current_steps": 63, "total_steps": 426, "loss": 0.5466, "lr": 9.939400340730611e-06, "epoch": 0.4426229508196721, "percentage": 14.79, "elapsed_time": "1:02:31", "remaining_time": "6:00:14"}
{"current_steps": 64, "total_steps": 426, "loss": 0.4886, "lr": 9.932868250586619e-06, "epoch": 0.4496487119437939, "percentage": 15.02, "elapsed_time": "1:03:30", "remaining_time": "5:59:10"}
{"current_steps": 65, "total_steps": 426, "loss": 0.5459, "lr": 9.926004266536314e-06, "epoch": 0.4566744730679157, "percentage": 15.26, "elapsed_time": "1:04:29", "remaining_time": "5:58:07"}
{"current_steps": 66, "total_steps": 426, "loss": 0.5319, "lr": 9.918808850403192e-06, "epoch": 0.4637002341920375, "percentage": 15.49, "elapsed_time": "1:05:24", "remaining_time": "5:56:43"}
{"current_steps": 67, "total_steps": 426, "loss": 0.6205, "lr": 9.911282486310214e-06, "epoch": 0.4707259953161593, "percentage": 15.73, "elapsed_time": "1:06:22", "remaining_time": "5:55:36"}
{"current_steps": 68, "total_steps": 426, "loss": 0.4813, "lr": 9.903425680647225e-06, "epoch": 0.477751756440281, "percentage": 15.96, "elapsed_time": "1:07:24", "remaining_time": "5:54:52"}
{"current_steps": 69, "total_steps": 426, "loss": 0.5635, "lr": 9.895238962036878e-06, "epoch": 0.4847775175644028, "percentage": 16.2, "elapsed_time": "1:08:23", "remaining_time": "5:53:51"}
{"current_steps": 70, "total_steps": 426, "loss": 0.5122, "lr": 9.88672288129908e-06, "epoch": 0.4918032786885246, "percentage": 16.43, "elapsed_time": "1:09:28", "remaining_time": "5:53:21"}
{"current_steps": 71, "total_steps": 426, "loss": 0.5768, "lr": 9.877878011413924e-06, "epoch": 0.49882903981264637, "percentage": 16.67, "elapsed_time": "1:10:28", "remaining_time": "5:52:22"}
{"current_steps": 72, "total_steps": 426, "loss": 0.5522, "lr": 9.868704947483134e-06, "epoch": 0.5058548009367682, "percentage": 16.9, "elapsed_time": "1:11:31", "remaining_time": "5:51:41"}
{"current_steps": 73, "total_steps": 426, "loss": 0.5652, "lr": 9.859204306690038e-06, "epoch": 0.5128805620608899, "percentage": 17.14, "elapsed_time": "1:12:29", "remaining_time": "5:50:34"}
{"current_steps": 74, "total_steps": 426, "loss": 0.5292, "lr": 9.849376728258024e-06, "epoch": 0.5199063231850117, "percentage": 17.37, "elapsed_time": "1:13:32", "remaining_time": "5:49:49"}
{"current_steps": 75, "total_steps": 426, "loss": 0.5303, "lr": 9.839222873407553e-06, "epoch": 0.5269320843091335, "percentage": 17.61, "elapsed_time": "1:14:35", "remaining_time": "5:49:05"}
{"current_steps": 76, "total_steps": 426, "loss": 0.5033, "lr": 9.828743425311654e-06, "epoch": 0.5339578454332553, "percentage": 17.84, "elapsed_time": "1:15:35", "remaining_time": "5:48:07"}
{"current_steps": 77, "total_steps": 426, "loss": 0.4858, "lr": 9.817939089049964e-06, "epoch": 0.5409836065573771, "percentage": 18.08, "elapsed_time": "1:16:33", "remaining_time": "5:47:02"}
{"current_steps": 78, "total_steps": 426, "loss": 0.5326, "lr": 9.806810591561295e-06, "epoch": 0.5480093676814989, "percentage": 18.31, "elapsed_time": "1:17:33", "remaining_time": "5:46:01"}
{"current_steps": 79, "total_steps": 426, "loss": 0.5686, "lr": 9.795358681594712e-06, "epoch": 0.5550351288056206, "percentage": 18.54, "elapsed_time": "1:18:29", "remaining_time": "5:44:47"}
{"current_steps": 80, "total_steps": 426, "loss": 0.5446, "lr": 9.783584129659162e-06, "epoch": 0.5620608899297423, "percentage": 18.78, "elapsed_time": "1:19:31", "remaining_time": "5:43:56"}
{"current_steps": 81, "total_steps": 426, "loss": 0.5278, "lr": 9.771487727971642e-06, "epoch": 0.5690866510538641, "percentage": 19.01, "elapsed_time": "1:20:32", "remaining_time": "5:43:03"}
{"current_steps": 82, "total_steps": 426, "loss": 0.5276, "lr": 9.759070290403873e-06, "epoch": 0.5761124121779859, "percentage": 19.25, "elapsed_time": "1:21:34", "remaining_time": "5:42:12"}
{"current_steps": 83, "total_steps": 426, "loss": 0.5577, "lr": 9.746332652427566e-06, "epoch": 0.5831381733021077, "percentage": 19.48, "elapsed_time": "1:22:25", "remaining_time": "5:40:38"}
{"current_steps": 84, "total_steps": 426, "loss": 0.5362, "lr": 9.733275671058195e-06, "epoch": 0.5901639344262295, "percentage": 19.72, "elapsed_time": "1:23:24", "remaining_time": "5:39:35"}
{"current_steps": 85, "total_steps": 426, "loss": 0.5188, "lr": 9.71990022479734e-06, "epoch": 0.5971896955503513, "percentage": 19.95, "elapsed_time": "1:24:24", "remaining_time": "5:38:37"}
{"current_steps": 86, "total_steps": 426, "loss": 0.5184, "lr": 9.70620721357358e-06, "epoch": 0.6042154566744731, "percentage": 20.19, "elapsed_time": "1:25:24", "remaining_time": "5:37:40"}
{"current_steps": 87, "total_steps": 426, "loss": 0.5525, "lr": 9.69219755868194e-06, "epoch": 0.6112412177985949, "percentage": 20.42, "elapsed_time": "1:26:22", "remaining_time": "5:36:35"}
{"current_steps": 88, "total_steps": 426, "loss": 0.6103, "lr": 9.677872202721906e-06, "epoch": 0.6182669789227166, "percentage": 20.66, "elapsed_time": "1:27:24", "remaining_time": "5:35:44"}
{"current_steps": 89, "total_steps": 426, "loss": 0.4937, "lr": 9.663232109534011e-06, "epoch": 0.6252927400468384, "percentage": 20.89, "elapsed_time": "1:28:23", "remaining_time": "5:34:41"}
{"current_steps": 90, "total_steps": 426, "loss": 0.4613, "lr": 9.648278264134977e-06, "epoch": 0.6323185011709602, "percentage": 21.13, "elapsed_time": "1:29:20", "remaining_time": "5:33:34"}
{"current_steps": 91, "total_steps": 426, "loss": 0.5421, "lr": 9.633011672651443e-06, "epoch": 0.639344262295082, "percentage": 21.36, "elapsed_time": "1:30:20", "remaining_time": "5:32:35"}
{"current_steps": 92, "total_steps": 426, "loss": 0.5133, "lr": 9.617433362252277e-06, "epoch": 0.6463700234192038, "percentage": 21.6, "elapsed_time": "1:31:18", "remaining_time": "5:31:30"}
{"current_steps": 93, "total_steps": 426, "loss": 0.5665, "lr": 9.601544381079457e-06, "epoch": 0.6533957845433255, "percentage": 21.83, "elapsed_time": "1:32:16", "remaining_time": "5:30:25"}
{"current_steps": 94, "total_steps": 426, "loss": 0.4941, "lr": 9.585345798177557e-06, "epoch": 0.6604215456674473, "percentage": 22.07, "elapsed_time": "1:33:10", "remaining_time": "5:29:05"}
{"current_steps": 95, "total_steps": 426, "loss": 0.4759, "lr": 9.56883870342181e-06, "epoch": 0.667447306791569, "percentage": 22.3, "elapsed_time": "1:34:07", "remaining_time": "5:27:57"}
{"current_steps": 96, "total_steps": 426, "loss": 0.5644, "lr": 9.552024207444794e-06, "epoch": 0.6744730679156908, "percentage": 22.54, "elapsed_time": "1:35:06", "remaining_time": "5:26:55"}
{"current_steps": 97, "total_steps": 426, "loss": 0.5615, "lr": 9.534903441561693e-06, "epoch": 0.6814988290398126, "percentage": 22.77, "elapsed_time": "1:36:05", "remaining_time": "5:25:54"}
{"current_steps": 98, "total_steps": 426, "loss": 0.5735, "lr": 9.517477557694182e-06, "epoch": 0.6885245901639344, "percentage": 23.0, "elapsed_time": "1:37:01", "remaining_time": "5:24:43"}
{"current_steps": 99, "total_steps": 426, "loss": 0.4838, "lr": 9.499747728292928e-06, "epoch": 0.6955503512880562, "percentage": 23.24, "elapsed_time": "1:38:02", "remaining_time": "5:23:50"}
{"current_steps": 100, "total_steps": 426, "loss": 0.583, "lr": 9.481715146258699e-06, "epoch": 0.702576112412178, "percentage": 23.47, "elapsed_time": "1:39:03", "remaining_time": "5:22:55"}
{"current_steps": 101, "total_steps": 426, "loss": 0.5629, "lr": 9.463381024862116e-06, "epoch": 0.7096018735362998, "percentage": 23.71, "elapsed_time": "1:40:01", "remaining_time": "5:21:52"}
{"current_steps": 102, "total_steps": 426, "loss": 0.5186, "lr": 9.444746597662e-06, "epoch": 0.7166276346604216, "percentage": 23.94, "elapsed_time": "1:40:57", "remaining_time": "5:20:41"}
{"current_steps": 103, "total_steps": 426, "loss": 0.5929, "lr": 9.425813118422393e-06, "epoch": 0.7236533957845434, "percentage": 24.18, "elapsed_time": "1:41:56", "remaining_time": "5:19:41"}
{"current_steps": 104, "total_steps": 426, "loss": 0.4721, "lr": 9.406581861028199e-06, "epoch": 0.7306791569086651, "percentage": 24.41, "elapsed_time": "1:42:49", "remaining_time": "5:18:21"}
{"current_steps": 105, "total_steps": 426, "loss": 0.5086, "lr": 9.387054119399466e-06, "epoch": 0.7377049180327869, "percentage": 24.65, "elapsed_time": "1:43:47", "remaining_time": "5:17:16"}
{"current_steps": 106, "total_steps": 426, "loss": 0.5382, "lr": 9.36723120740434e-06, "epoch": 0.7447306791569087, "percentage": 24.88, "elapsed_time": "1:44:48", "remaining_time": "5:16:23"}
{"current_steps": 107, "total_steps": 426, "loss": 0.514, "lr": 9.347114458770656e-06, "epoch": 0.7517564402810304, "percentage": 25.12, "elapsed_time": "1:45:48", "remaining_time": "5:15:27"}
{"current_steps": 108, "total_steps": 426, "loss": 0.5179, "lr": 9.326705226996207e-06, "epoch": 0.7587822014051522, "percentage": 25.35, "elapsed_time": "1:46:50", "remaining_time": "5:14:36"}
{"current_steps": 109, "total_steps": 426, "loss": 0.5395, "lr": 9.306004885257675e-06, "epoch": 0.765807962529274, "percentage": 25.59, "elapsed_time": "1:47:50", "remaining_time": "5:13:37"}
{"current_steps": 110, "total_steps": 426, "loss": 0.5406, "lr": 9.28501482631824e-06, "epoch": 0.7728337236533958, "percentage": 25.82, "elapsed_time": "1:48:52", "remaining_time": "5:12:45"}
{"current_steps": 111, "total_steps": 426, "loss": 0.5313, "lr": 9.26373646243388e-06, "epoch": 0.7798594847775175, "percentage": 26.06, "elapsed_time": "1:49:53", "remaining_time": "5:11:52"}
{"current_steps": 112, "total_steps": 426, "loss": 0.498, "lr": 9.242171225258336e-06, "epoch": 0.7868852459016393, "percentage": 26.29, "elapsed_time": "1:50:51", "remaining_time": "5:10:49"}
{"current_steps": 113, "total_steps": 426, "loss": 0.5263, "lr": 9.220320565746806e-06, "epoch": 0.7939110070257611, "percentage": 26.53, "elapsed_time": "1:51:52", "remaining_time": "5:09:53"}
{"current_steps": 114, "total_steps": 426, "loss": 0.4941, "lr": 9.198185954058305e-06, "epoch": 0.8009367681498829, "percentage": 26.76, "elapsed_time": "1:52:53", "remaining_time": "5:08:58"}
{"current_steps": 115, "total_steps": 426, "loss": 0.4953, "lr": 9.175768879456759e-06, "epoch": 0.8079625292740047, "percentage": 27.0, "elapsed_time": "1:53:49", "remaining_time": "5:07:49"}
{"current_steps": 116, "total_steps": 426, "loss": 0.4857, "lr": 9.153070850210803e-06, "epoch": 0.8149882903981265, "percentage": 27.23, "elapsed_time": "1:54:50", "remaining_time": "5:06:53"}
{"current_steps": 117, "total_steps": 426, "loss": 0.508, "lr": 9.130093393492302e-06, "epoch": 0.8220140515222483, "percentage": 27.46, "elapsed_time": "1:55:50", "remaining_time": "5:05:55"}
{"current_steps": 118, "total_steps": 426, "loss": 0.5354, "lr": 9.106838055273589e-06, "epoch": 0.8290398126463701, "percentage": 27.7, "elapsed_time": "1:56:46", "remaining_time": "5:04:48"}
{"current_steps": 119, "total_steps": 426, "loss": 0.5021, "lr": 9.083306400223465e-06, "epoch": 0.8360655737704918, "percentage": 27.93, "elapsed_time": "1:57:50", "remaining_time": "5:04:01"}
{"current_steps": 120, "total_steps": 426, "loss": 0.4905, "lr": 9.059500011601919e-06, "epoch": 0.8430913348946136, "percentage": 28.17, "elapsed_time": "1:58:50", "remaining_time": "5:03:02"}
{"current_steps": 121, "total_steps": 426, "loss": 0.5299, "lr": 9.035420491153596e-06, "epoch": 0.8501170960187353, "percentage": 28.4, "elapsed_time": "1:59:47", "remaining_time": "5:01:56"}
{"current_steps": 122, "total_steps": 426, "loss": 0.4801, "lr": 9.011069459000035e-06, "epoch": 0.8571428571428571, "percentage": 28.64, "elapsed_time": "2:00:45", "remaining_time": "5:00:53"}
{"current_steps": 123, "total_steps": 426, "loss": 0.492, "lr": 8.986448553530665e-06, "epoch": 0.8641686182669789, "percentage": 28.87, "elapsed_time": "2:01:42", "remaining_time": "4:59:49"}
{"current_steps": 124, "total_steps": 426, "loss": 0.4774, "lr": 8.961559431292562e-06, "epoch": 0.8711943793911007, "percentage": 29.11, "elapsed_time": "2:02:42", "remaining_time": "4:58:51"}
{"current_steps": 125, "total_steps": 426, "loss": 0.4615, "lr": 8.936403766879003e-06, "epoch": 0.8782201405152225, "percentage": 29.34, "elapsed_time": "2:03:33", "remaining_time": "4:57:30"}
{"current_steps": 126, "total_steps": 426, "loss": 0.4974, "lr": 8.910983252816794e-06, "epoch": 0.8852459016393442, "percentage": 29.58, "elapsed_time": "2:04:31", "remaining_time": "4:56:28"}
{"current_steps": 127, "total_steps": 426, "loss": 0.496, "lr": 8.885299599452381e-06, "epoch": 0.892271662763466, "percentage": 29.81, "elapsed_time": "2:05:29", "remaining_time": "4:55:27"}
{"current_steps": 128, "total_steps": 426, "loss": 0.4637, "lr": 8.859354534836797e-06, "epoch": 0.8992974238875878, "percentage": 30.05, "elapsed_time": "2:06:29", "remaining_time": "4:54:29"}
{"current_steps": 129, "total_steps": 426, "loss": 0.5046, "lr": 8.833149804609372e-06, "epoch": 0.9063231850117096, "percentage": 30.28, "elapsed_time": "2:07:25", "remaining_time": "4:53:22"}
{"current_steps": 130, "total_steps": 426, "loss": 0.49, "lr": 8.806687171880298e-06, "epoch": 0.9133489461358314, "percentage": 30.52, "elapsed_time": "2:08:25", "remaining_time": "4:52:24"}
{"current_steps": 131, "total_steps": 426, "loss": 0.5804, "lr": 8.779968417111991e-06, "epoch": 0.9203747072599532, "percentage": 30.75, "elapsed_time": "2:09:23", "remaining_time": "4:51:22"}
{"current_steps": 132, "total_steps": 426, "loss": 0.5149, "lr": 8.752995337999316e-06, "epoch": 0.927400468384075, "percentage": 30.99, "elapsed_time": "2:10:27", "remaining_time": "4:50:34"}
{"current_steps": 133, "total_steps": 426, "loss": 0.5149, "lr": 8.725769749348612e-06, "epoch": 0.9344262295081968, "percentage": 31.22, "elapsed_time": "2:11:26", "remaining_time": "4:49:34"}
{"current_steps": 134, "total_steps": 426, "loss": 0.5291, "lr": 8.698293482955605e-06, "epoch": 0.9414519906323185, "percentage": 31.46, "elapsed_time": "2:12:22", "remaining_time": "4:48:27"}
{"current_steps": 135, "total_steps": 426, "loss": 0.5072, "lr": 8.670568387482153e-06, "epoch": 0.9484777517564403, "percentage": 31.69, "elapsed_time": "2:13:19", "remaining_time": "4:47:24"}
{"current_steps": 136, "total_steps": 426, "loss": 0.4863, "lr": 8.642596328331864e-06, "epoch": 0.955503512880562, "percentage": 31.92, "elapsed_time": "2:14:10", "remaining_time": "4:46:07"}
{"current_steps": 137, "total_steps": 426, "loss": 0.4843, "lr": 8.614379187524593e-06, "epoch": 0.9625292740046838, "percentage": 32.16, "elapsed_time": "2:15:09", "remaining_time": "4:45:06"}
{"current_steps": 138, "total_steps": 426, "loss": 0.5084, "lr": 8.585918863569806e-06, "epoch": 0.9695550351288056, "percentage": 32.39, "elapsed_time": "2:16:08", "remaining_time": "4:44:07"}
{"current_steps": 139, "total_steps": 426, "loss": 0.5292, "lr": 8.55721727133886e-06, "epoch": 0.9765807962529274, "percentage": 32.63, "elapsed_time": "2:17:08", "remaining_time": "4:43:09"}
{"current_steps": 140, "total_steps": 426, "loss": 0.499, "lr": 8.528276341936146e-06, "epoch": 0.9836065573770492, "percentage": 32.86, "elapsed_time": "2:18:09", "remaining_time": "4:42:13"}
{"current_steps": 141, "total_steps": 426, "loss": 0.5519, "lr": 8.499098022569177e-06, "epoch": 0.990632318501171, "percentage": 33.1, "elapsed_time": "2:19:08", "remaining_time": "4:41:14"}
{"current_steps": 142, "total_steps": 426, "loss": 0.493, "lr": 8.469684276417568e-06, "epoch": 0.9976580796252927, "percentage": 33.33, "elapsed_time": "2:20:08", "remaining_time": "4:40:17"}
{"current_steps": 143, "total_steps": 426, "loss": 0.5449, "lr": 8.440037082500953e-06, "epoch": 1.0, "percentage": 33.57, "elapsed_time": "2:20:20", "remaining_time": "4:37:44"}
{"current_steps": 144, "total_steps": 426, "loss": 0.3758, "lr": 8.410158435545825e-06, "epoch": 1.0070257611241218, "percentage": 33.8, "elapsed_time": "2:21:26", "remaining_time": "4:36:59"}
{"current_steps": 145, "total_steps": 426, "loss": 0.3874, "lr": 8.380050345851338e-06, "epoch": 1.0140515222482436, "percentage": 34.04, "elapsed_time": "2:22:27", "remaining_time": "4:36:04"}
{"current_steps": 146, "total_steps": 426, "loss": 0.3925, "lr": 8.349714839154035e-06, "epoch": 1.0210772833723654, "percentage": 34.27, "elapsed_time": "2:23:28", "remaining_time": "4:35:09"}
{"current_steps": 147, "total_steps": 426, "loss": 0.3708, "lr": 8.319153956491567e-06, "epoch": 1.0281030444964872, "percentage": 34.51, "elapsed_time": "2:24:22", "remaining_time": "4:34:00"}
{"current_steps": 148, "total_steps": 426, "loss": 0.4101, "lr": 8.288369754065362e-06, "epoch": 1.035128805620609, "percentage": 34.74, "elapsed_time": "2:25:21", "remaining_time": "4:33:01"}
{"current_steps": 149, "total_steps": 426, "loss": 0.3982, "lr": 8.257364303102275e-06, "epoch": 1.0421545667447307, "percentage": 34.98, "elapsed_time": "2:26:30", "remaining_time": "4:32:22"}
{"current_steps": 150, "total_steps": 426, "loss": 0.4387, "lr": 8.226139689715233e-06, "epoch": 1.0491803278688525, "percentage": 35.21, "elapsed_time": "2:27:30", "remaining_time": "4:31:25"}
{"current_steps": 151, "total_steps": 426, "loss": 0.3351, "lr": 8.19469801476288e-06, "epoch": 1.0562060889929743, "percentage": 35.45, "elapsed_time": "2:28:27", "remaining_time": "4:30:22"}
{"current_steps": 152, "total_steps": 426, "loss": 0.4093, "lr": 8.16304139370823e-06, "epoch": 1.063231850117096, "percentage": 35.68, "elapsed_time": "2:29:27", "remaining_time": "4:29:24"}
{"current_steps": 153, "total_steps": 426, "loss": 0.3781, "lr": 8.131171956476328e-06, "epoch": 1.0702576112412179, "percentage": 35.92, "elapsed_time": "2:30:28", "remaining_time": "4:28:30"}
{"current_steps": 154, "total_steps": 426, "loss": 0.3705, "lr": 8.09909184731094e-06, "epoch": 1.0772833723653397, "percentage": 36.15, "elapsed_time": "2:31:28", "remaining_time": "4:27:31"}
{"current_steps": 155, "total_steps": 426, "loss": 0.4005, "lr": 8.066803224630295e-06, "epoch": 1.0843091334894615, "percentage": 36.38, "elapsed_time": "2:32:31", "remaining_time": "4:26:40"}
{"current_steps": 156, "total_steps": 426, "loss": 0.3681, "lr": 8.034308260881854e-06, "epoch": 1.0913348946135832, "percentage": 36.62, "elapsed_time": "2:33:30", "remaining_time": "4:25:41"}
{"current_steps": 157, "total_steps": 426, "loss": 0.4282, "lr": 8.00160914239615e-06, "epoch": 1.098360655737705, "percentage": 36.85, "elapsed_time": "2:34:25", "remaining_time": "4:24:35"}
{"current_steps": 158, "total_steps": 426, "loss": 0.4118, "lr": 7.968708069239672e-06, "epoch": 1.1053864168618266, "percentage": 37.09, "elapsed_time": "2:35:26", "remaining_time": "4:23:39"}
{"current_steps": 159, "total_steps": 426, "loss": 0.3738, "lr": 7.935607255066867e-06, "epoch": 1.1124121779859484, "percentage": 37.32, "elapsed_time": "2:36:25", "remaining_time": "4:22:40"}
{"current_steps": 160, "total_steps": 426, "loss": 0.3967, "lr": 7.902308926971166e-06, "epoch": 1.1194379391100702, "percentage": 37.56, "elapsed_time": "2:37:22", "remaining_time": "4:21:37"}
{"current_steps": 161, "total_steps": 426, "loss": 0.3859, "lr": 7.868815325335168e-06, "epoch": 1.126463700234192, "percentage": 37.79, "elapsed_time": "2:38:21", "remaining_time": "4:20:38"}
{"current_steps": 162, "total_steps": 426, "loss": 0.3819, "lr": 7.835128703679896e-06, "epoch": 1.1334894613583137, "percentage": 38.03, "elapsed_time": "2:39:18", "remaining_time": "4:19:36"}
{"current_steps": 163, "total_steps": 426, "loss": 0.369, "lr": 7.801251328513164e-06, "epoch": 1.1405152224824355, "percentage": 38.26, "elapsed_time": "2:40:18", "remaining_time": "4:18:39"}
{"current_steps": 164, "total_steps": 426, "loss": 0.3937, "lr": 7.767185479177092e-06, "epoch": 1.1475409836065573, "percentage": 38.5, "elapsed_time": "2:41:15", "remaining_time": "4:17:37"}
{"current_steps": 165, "total_steps": 426, "loss": 0.3615, "lr": 7.732933447694748e-06, "epoch": 1.154566744730679, "percentage": 38.73, "elapsed_time": "2:42:14", "remaining_time": "4:16:37"}
{"current_steps": 166, "total_steps": 426, "loss": 0.3609, "lr": 7.698497538615928e-06, "epoch": 1.161592505854801, "percentage": 38.97, "elapsed_time": "2:43:15", "remaining_time": "4:15:42"}
{"current_steps": 167, "total_steps": 426, "loss": 0.3906, "lr": 7.663880068862106e-06, "epoch": 1.1686182669789227, "percentage": 39.2, "elapsed_time": "2:44:12", "remaining_time": "4:14:40"}
{"current_steps": 168, "total_steps": 426, "loss": 0.3896, "lr": 7.629083367570547e-06, "epoch": 1.1756440281030445, "percentage": 39.44, "elapsed_time": "2:45:03", "remaining_time": "4:13:28"}
{"current_steps": 169, "total_steps": 426, "loss": 0.3945, "lr": 7.594109775937595e-06, "epoch": 1.1826697892271663, "percentage": 39.67, "elapsed_time": "2:45:59", "remaining_time": "4:12:26"}
{"current_steps": 170, "total_steps": 426, "loss": 0.3352, "lr": 7.558961647061156e-06, "epoch": 1.189695550351288, "percentage": 39.91, "elapsed_time": "2:46:59", "remaining_time": "4:11:28"}
{"current_steps": 171, "total_steps": 426, "loss": 0.3503, "lr": 7.5236413457823745e-06, "epoch": 1.1967213114754098, "percentage": 40.14, "elapsed_time": "2:47:59", "remaining_time": "4:10:30"}
{"current_steps": 172, "total_steps": 426, "loss": 0.3538, "lr": 7.488151248526518e-06, "epoch": 1.2037470725995316, "percentage": 40.38, "elapsed_time": "2:49:01", "remaining_time": "4:09:36"}
{"current_steps": 173, "total_steps": 426, "loss": 0.4321, "lr": 7.452493743143092e-06, "epoch": 1.2107728337236534, "percentage": 40.61, "elapsed_time": "2:50:05", "remaining_time": "4:08:44"}
{"current_steps": 174, "total_steps": 426, "loss": 0.3621, "lr": 7.416671228745181e-06, "epoch": 1.2177985948477752, "percentage": 40.85, "elapsed_time": "2:51:06", "remaining_time": "4:07:48"}
{"current_steps": 175, "total_steps": 426, "loss": 0.3761, "lr": 7.380686115548024e-06, "epoch": 1.224824355971897, "percentage": 41.08, "elapsed_time": "2:52:06", "remaining_time": "4:06:51"}
{"current_steps": 176, "total_steps": 426, "loss": 0.3883, "lr": 7.344540824706855e-06, "epoch": 1.2318501170960188, "percentage": 41.31, "elapsed_time": "2:53:06", "remaining_time": "4:05:54"}
{"current_steps": 177, "total_steps": 426, "loss": 0.3823, "lr": 7.3082377881540025e-06, "epoch": 1.2388758782201406, "percentage": 41.55, "elapsed_time": "2:54:04", "remaining_time": "4:04:53"}
{"current_steps": 178, "total_steps": 426, "loss": 0.3878, "lr": 7.271779448435265e-06, "epoch": 1.2459016393442623, "percentage": 41.78, "elapsed_time": "2:55:01", "remaining_time": "4:03:51"}
{"current_steps": 179, "total_steps": 426, "loss": 0.396, "lr": 7.235168258545569e-06, "epoch": 1.2529274004683841, "percentage": 42.02, "elapsed_time": "2:56:17", "remaining_time": "4:03:15"}
{"current_steps": 180, "total_steps": 426, "loss": 0.4016, "lr": 7.198406681763925e-06, "epoch": 1.259953161592506, "percentage": 42.25, "elapsed_time": "2:57:19", "remaining_time": "4:02:20"}
{"current_steps": 181, "total_steps": 426, "loss": 0.3753, "lr": 7.161497191487693e-06, "epoch": 1.2669789227166277, "percentage": 42.49, "elapsed_time": "2:58:26", "remaining_time": "4:01:32"}
{"current_steps": 182, "total_steps": 426, "loss": 0.3785, "lr": 7.124442271066174e-06, "epoch": 1.2740046838407495, "percentage": 42.72, "elapsed_time": "2:59:26", "remaining_time": "4:00:34"}
{"current_steps": 183, "total_steps": 426, "loss": 0.3865, "lr": 7.087244413633516e-06, "epoch": 1.281030444964871, "percentage": 42.96, "elapsed_time": "3:00:25", "remaining_time": "3:59:35"}
{"current_steps": 184, "total_steps": 426, "loss": 0.3672, "lr": 7.049906121940974e-06, "epoch": 1.288056206088993, "percentage": 43.19, "elapsed_time": "3:01:22", "remaining_time": "3:58:33"}
{"current_steps": 185, "total_steps": 426, "loss": 0.3426, "lr": 7.012429908188523e-06, "epoch": 1.2950819672131146, "percentage": 43.43, "elapsed_time": "3:02:20", "remaining_time": "3:57:32"}
{"current_steps": 186, "total_steps": 426, "loss": 0.4294, "lr": 6.9748182938558225e-06, "epoch": 1.3021077283372366, "percentage": 43.66, "elapsed_time": "3:03:21", "remaining_time": "3:56:35"}
{"current_steps": 187, "total_steps": 426, "loss": 0.3471, "lr": 6.937073809532581e-06, "epoch": 1.3091334894613582, "percentage": 43.9, "elapsed_time": "3:04:18", "remaining_time": "3:55:33"}
{"current_steps": 188, "total_steps": 426, "loss": 0.3278, "lr": 6.899198994748274e-06, "epoch": 1.3161592505854802, "percentage": 44.13, "elapsed_time": "3:05:09", "remaining_time": "3:54:23"}
{"current_steps": 189, "total_steps": 426, "loss": 0.3667, "lr": 6.861196397801297e-06, "epoch": 1.3231850117096018, "percentage": 44.37, "elapsed_time": "3:06:12", "remaining_time": "3:53:30"}
{"current_steps": 190, "total_steps": 426, "loss": 0.3489, "lr": 6.823068575587496e-06, "epoch": 1.3302107728337236, "percentage": 44.6, "elapsed_time": "3:07:08", "remaining_time": "3:52:26"}
{"current_steps": 191, "total_steps": 426, "loss": 0.3779, "lr": 6.784818093428144e-06, "epoch": 1.3372365339578454, "percentage": 44.84, "elapsed_time": "3:08:05", "remaining_time": "3:51:25"}
{"current_steps": 192, "total_steps": 426, "loss": 0.3771, "lr": 6.746447524897335e-06, "epoch": 1.3442622950819672, "percentage": 45.07, "elapsed_time": "3:09:02", "remaining_time": "3:50:23"}
{"current_steps": 193, "total_steps": 426, "loss": 0.3814, "lr": 6.70795945164883e-06, "epoch": 1.351288056206089, "percentage": 45.31, "elapsed_time": "3:09:59", "remaining_time": "3:49:22"}
{"current_steps": 194, "total_steps": 426, "loss": 0.3562, "lr": 6.6693564632423626e-06, "epoch": 1.3583138173302107, "percentage": 45.54, "elapsed_time": "3:10:57", "remaining_time": "3:48:22"}
{"current_steps": 195, "total_steps": 426, "loss": 0.3702, "lr": 6.630641156969397e-06, "epoch": 1.3653395784543325, "percentage": 45.77, "elapsed_time": "3:12:02", "remaining_time": "3:47:29"}
{"current_steps": 196, "total_steps": 426, "loss": 0.392, "lr": 6.591816137678388e-06, "epoch": 1.3723653395784543, "percentage": 46.01, "elapsed_time": "3:12:58", "remaining_time": "3:46:27"}
{"current_steps": 197, "total_steps": 426, "loss": 0.4089, "lr": 6.552884017599517e-06, "epoch": 1.379391100702576, "percentage": 46.24, "elapsed_time": "3:13:56", "remaining_time": "3:45:26"}
{"current_steps": 198, "total_steps": 426, "loss": 0.3266, "lr": 6.513847416168929e-06, "epoch": 1.3864168618266979, "percentage": 46.48, "elapsed_time": "3:14:55", "remaining_time": "3:44:27"}
{"current_steps": 199, "total_steps": 426, "loss": 0.347, "lr": 6.474708959852504e-06, "epoch": 1.3934426229508197, "percentage": 46.71, "elapsed_time": "3:15:47", "remaining_time": "3:43:20"}
{"current_steps": 200, "total_steps": 426, "loss": 0.3696, "lr": 6.435471281969133e-06, "epoch": 1.4004683840749415, "percentage": 46.95, "elapsed_time": "3:16:42", "remaining_time": "3:42:17"}
{"current_steps": 201, "total_steps": 426, "loss": 0.3881, "lr": 6.396137022513545e-06, "epoch": 1.4074941451990632, "percentage": 47.18, "elapsed_time": "3:20:13", "remaining_time": "3:44:08"}
{"current_steps": 202, "total_steps": 426, "loss": 0.3878, "lr": 6.3567088279786885e-06, "epoch": 1.414519906323185, "percentage": 47.42, "elapsed_time": "3:21:12", "remaining_time": "3:43:06"}
{"current_steps": 203, "total_steps": 426, "loss": 0.3853, "lr": 6.317189351177657e-06, "epoch": 1.4215456674473068, "percentage": 47.65, "elapsed_time": "3:22:10", "remaining_time": "3:42:05"}
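Each JSONL record above is one training-log entry per logged step. A quick way to inspect a log like this is to parse the file and plot the loss and lr columns; note how the lr values climb to their 1e-05 peak around step 44 and then decay smoothly, consistent with a warmup-then-decay schedule. A minimal Python sketch, assuming trainer_log.jsonl has been downloaded locally and matplotlib is installed:

import json
import matplotlib.pyplot as plt

# One JSON object per line; skip blank lines defensively.
records = []
with open("trainer_log.jsonl") as f:
    for line in f:
        if line.strip():
            records.append(json.loads(line))

# Keep only entries that carry both a loss and an lr value.
train = [r for r in records if "loss" in r and "lr" in r]
steps = [r["current_steps"] for r in train]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True)
ax_loss.plot(steps, [r["loss"] for r in train])
ax_loss.set_ylabel("loss")
ax_lr.plot(steps, [r["lr"] for r in train])
ax_lr.set_ylabel("lr")
ax_lr.set_xlabel("step")
plt.show()

The same records load straight into pandas via pd.read_json("trainer_log.jsonl", lines=True) if tabular inspection is more convenient.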
training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f341362c92fab13c38e92c742b6573740fe3f6445ca06cc44b8a1c2ad53e5c33
size 8184
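training_args.bin is the torch-serialized TrainingArguments object that the Hugging Face Trainer typically writes next to its checkpoints. A sketch of how it can be inspected, assuming torch plus a transformers version compatible with the one used for training are installed (unpickling needs the class definition):

import torch

# Not a tensor file: it is a pickled TrainingArguments object saved via
# torch.save, so weights_only must stay False on recent torch releases
# where True is the default.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.lr_scheduler_type, args.num_train_epochs)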
vocab.json
ADDED
The diff for this file is too large to render.
See raw diff
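Rather than reading vocab.json and merges.txt by hand, the usual route is to load the full tokenizer through transformers, which resolves these files together with the added special tokens in one call. A sketch, with "path/to/checkpoint" standing in for a local clone of this repo or its Hub id:

from transformers import AutoTokenizer

# Picks up tokenizer.json (fast path) or vocab.json + merges.txt, plus
# added_tokens.json and special_tokens_map.json from the same directory.
tok = AutoTokenizer.from_pretrained("path/to/checkpoint")

ids = tok("hello world")["input_ids"]
print(ids, tok.decode(ids))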