Training in progress, step 200
Browse files- .gitattributes +1 -0
- added_tokens.json +24 -0
- config.json +28 -0
- merges.txt +0 -0
- model-00001-of-00004.safetensors +3 -0
- model-00002-of-00004.safetensors +3 -0
- model-00003-of-00004.safetensors +3 -0
- model-00004-of-00004.safetensors +3 -0
- model.safetensors.index.json +346 -0
- special_tokens_map.json +31 -0
- tokenizer.json +3 -0
- tokenizer_config.json +209 -0
- trainer_log.jsonl +201 -0
- training_args.bin +3 -0
- vocab.json +0 -0
.gitattributes
CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
|
33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
36 |
+
tokenizer.json filter=lfs diff=lfs merge=lfs -text
|
added_tokens.json
ADDED
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"</tool_call>": 151658,
|
3 |
+
"<tool_call>": 151657,
|
4 |
+
"<|box_end|>": 151649,
|
5 |
+
"<|box_start|>": 151648,
|
6 |
+
"<|endoftext|>": 151643,
|
7 |
+
"<|file_sep|>": 151664,
|
8 |
+
"<|fim_middle|>": 151660,
|
9 |
+
"<|fim_pad|>": 151662,
|
10 |
+
"<|fim_prefix|>": 151659,
|
11 |
+
"<|fim_suffix|>": 151661,
|
12 |
+
"<|im_end|>": 151645,
|
13 |
+
"<|im_start|>": 151644,
|
14 |
+
"<|image_pad|>": 151655,
|
15 |
+
"<|object_ref_end|>": 151647,
|
16 |
+
"<|object_ref_start|>": 151646,
|
17 |
+
"<|quad_end|>": 151651,
|
18 |
+
"<|quad_start|>": 151650,
|
19 |
+
"<|repo_name|>": 151663,
|
20 |
+
"<|video_pad|>": 151656,
|
21 |
+
"<|vision_end|>": 151653,
|
22 |
+
"<|vision_pad|>": 151654,
|
23 |
+
"<|vision_start|>": 151652
|
24 |
+
}
|
config.json
ADDED
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"architectures": [
|
3 |
+
"Qwen2ForCausalLM"
|
4 |
+
],
|
5 |
+
"attention_dropout": 0.0,
|
6 |
+
"bos_token_id": 151643,
|
7 |
+
"eos_token_id": 151645,
|
8 |
+
"hidden_act": "silu",
|
9 |
+
"hidden_size": 3584,
|
10 |
+
"initializer_range": 0.02,
|
11 |
+
"intermediate_size": 18944,
|
12 |
+
"max_position_embeddings": 32768,
|
13 |
+
"max_window_layers": 28,
|
14 |
+
"model_type": "qwen2",
|
15 |
+
"num_attention_heads": 28,
|
16 |
+
"num_hidden_layers": 28,
|
17 |
+
"num_key_value_heads": 4,
|
18 |
+
"rms_norm_eps": 1e-06,
|
19 |
+
"rope_scaling": null,
|
20 |
+
"rope_theta": 1000000.0,
|
21 |
+
"sliding_window": 131072,
|
22 |
+
"tie_word_embeddings": false,
|
23 |
+
"torch_dtype": "bfloat16",
|
24 |
+
"transformers_version": "4.51.2",
|
25 |
+
"use_cache": false,
|
26 |
+
"use_sliding_window": false,
|
27 |
+
"vocab_size": 152064
|
28 |
+
}
|
merges.txt
ADDED
The diff for this file is too large to render.
See raw diff
|
|
model-00001-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:fa913089ffc1f4fa3c1d1d87e268bbe7b476625ec706ba8b702c9a661652f8e6
|
3 |
+
size 4877660776
|
model-00002-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:3eec48c999cbd71f5b77f3a04f79cec58540c1f9f9c05f2f28332a35556d8af9
|
3 |
+
size 4932751008
|
model-00003-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:9d89f87a708c5599753643759ca37acd2f94b92ec6d960101305926b394c79a9
|
3 |
+
size 4330865200
|
model-00004-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:384971a8fcd71f6c4d896e937e040ce19fd613fdbe4db08c9edfd5c4af152a8f
|
3 |
+
size 1089994880
|
model.safetensors.index.json
ADDED
@@ -0,0 +1,346 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"metadata": {
|
3 |
+
"total_size": 15231233024
|
4 |
+
},
|
5 |
+
"weight_map": {
|
6 |
+
"lm_head.weight": "model-00004-of-00004.safetensors",
|
7 |
+
"model.embed_tokens.weight": "model-00001-of-00004.safetensors",
|
8 |
+
"model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
|
9 |
+
"model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
10 |
+
"model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
11 |
+
"model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
12 |
+
"model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
|
13 |
+
"model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
14 |
+
"model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
15 |
+
"model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
16 |
+
"model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
17 |
+
"model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
18 |
+
"model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
19 |
+
"model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
20 |
+
"model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
|
21 |
+
"model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
22 |
+
"model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
23 |
+
"model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
24 |
+
"model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
|
25 |
+
"model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
26 |
+
"model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
27 |
+
"model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
28 |
+
"model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
29 |
+
"model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
30 |
+
"model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
31 |
+
"model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
32 |
+
"model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
33 |
+
"model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
34 |
+
"model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
35 |
+
"model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
36 |
+
"model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
37 |
+
"model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
38 |
+
"model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
39 |
+
"model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
40 |
+
"model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
41 |
+
"model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
42 |
+
"model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
43 |
+
"model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
44 |
+
"model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
45 |
+
"model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
46 |
+
"model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
47 |
+
"model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
48 |
+
"model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
49 |
+
"model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
50 |
+
"model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
51 |
+
"model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
52 |
+
"model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
53 |
+
"model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
54 |
+
"model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
55 |
+
"model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
56 |
+
"model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
57 |
+
"model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
58 |
+
"model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
59 |
+
"model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
60 |
+
"model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
61 |
+
"model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
62 |
+
"model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
63 |
+
"model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
64 |
+
"model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
65 |
+
"model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
66 |
+
"model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
67 |
+
"model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
68 |
+
"model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
69 |
+
"model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
70 |
+
"model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
71 |
+
"model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
72 |
+
"model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
73 |
+
"model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
74 |
+
"model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
75 |
+
"model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
76 |
+
"model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
77 |
+
"model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
78 |
+
"model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
79 |
+
"model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
80 |
+
"model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
81 |
+
"model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
82 |
+
"model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
83 |
+
"model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
84 |
+
"model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
85 |
+
"model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
86 |
+
"model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
87 |
+
"model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
88 |
+
"model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
89 |
+
"model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
90 |
+
"model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
91 |
+
"model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
92 |
+
"model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
93 |
+
"model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
94 |
+
"model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
95 |
+
"model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
96 |
+
"model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
97 |
+
"model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
98 |
+
"model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
99 |
+
"model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
100 |
+
"model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
101 |
+
"model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
102 |
+
"model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
103 |
+
"model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
104 |
+
"model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
105 |
+
"model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
106 |
+
"model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
107 |
+
"model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
108 |
+
"model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
109 |
+
"model.layers.16.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
110 |
+
"model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
111 |
+
"model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
112 |
+
"model.layers.16.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
113 |
+
"model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
114 |
+
"model.layers.16.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
115 |
+
"model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
116 |
+
"model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
117 |
+
"model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
118 |
+
"model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
119 |
+
"model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
120 |
+
"model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
121 |
+
"model.layers.17.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
122 |
+
"model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
123 |
+
"model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
124 |
+
"model.layers.17.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
125 |
+
"model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
126 |
+
"model.layers.17.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
127 |
+
"model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
128 |
+
"model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
129 |
+
"model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
130 |
+
"model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
131 |
+
"model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
132 |
+
"model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
133 |
+
"model.layers.18.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
134 |
+
"model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
135 |
+
"model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
136 |
+
"model.layers.18.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
137 |
+
"model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
138 |
+
"model.layers.18.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
139 |
+
"model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
140 |
+
"model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
141 |
+
"model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
142 |
+
"model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
143 |
+
"model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
144 |
+
"model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
145 |
+
"model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
146 |
+
"model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
147 |
+
"model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
148 |
+
"model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
149 |
+
"model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
150 |
+
"model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
151 |
+
"model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
152 |
+
"model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
|
153 |
+
"model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
154 |
+
"model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
155 |
+
"model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
156 |
+
"model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
|
157 |
+
"model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
158 |
+
"model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
159 |
+
"model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
160 |
+
"model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
161 |
+
"model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
162 |
+
"model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
163 |
+
"model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
164 |
+
"model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
165 |
+
"model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
166 |
+
"model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
167 |
+
"model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
168 |
+
"model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
169 |
+
"model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
170 |
+
"model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
171 |
+
"model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
172 |
+
"model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
173 |
+
"model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
174 |
+
"model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
175 |
+
"model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
176 |
+
"model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
177 |
+
"model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
178 |
+
"model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
179 |
+
"model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
180 |
+
"model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
181 |
+
"model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
182 |
+
"model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
183 |
+
"model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
184 |
+
"model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
185 |
+
"model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
186 |
+
"model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
187 |
+
"model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
188 |
+
"model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
189 |
+
"model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
190 |
+
"model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
191 |
+
"model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
192 |
+
"model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
193 |
+
"model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
194 |
+
"model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
195 |
+
"model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
196 |
+
"model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
197 |
+
"model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
198 |
+
"model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
199 |
+
"model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
200 |
+
"model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
201 |
+
"model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
202 |
+
"model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
203 |
+
"model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
204 |
+
"model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
205 |
+
"model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
206 |
+
"model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
207 |
+
"model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
208 |
+
"model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
209 |
+
"model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
210 |
+
"model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
211 |
+
"model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
212 |
+
"model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
213 |
+
"model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
214 |
+
"model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
215 |
+
"model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
216 |
+
"model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
217 |
+
"model.layers.24.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
218 |
+
"model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
219 |
+
"model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
220 |
+
"model.layers.24.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
221 |
+
"model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
222 |
+
"model.layers.24.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
223 |
+
"model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
224 |
+
"model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
225 |
+
"model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
226 |
+
"model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
227 |
+
"model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
228 |
+
"model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
229 |
+
"model.layers.25.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
230 |
+
"model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
231 |
+
"model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
232 |
+
"model.layers.25.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
233 |
+
"model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
234 |
+
"model.layers.25.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
235 |
+
"model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
236 |
+
"model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
237 |
+
"model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
238 |
+
"model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
239 |
+
"model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
240 |
+
"model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
241 |
+
"model.layers.26.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
242 |
+
"model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
243 |
+
"model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
244 |
+
"model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
245 |
+
"model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
246 |
+
"model.layers.26.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
247 |
+
"model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
248 |
+
"model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
|
249 |
+
"model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
|
250 |
+
"model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
|
251 |
+
"model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
|
252 |
+
"model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
|
253 |
+
"model.layers.27.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
|
254 |
+
"model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
|
255 |
+
"model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
|
256 |
+
"model.layers.27.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
|
257 |
+
"model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
|
258 |
+
"model.layers.27.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
|
259 |
+
"model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
|
260 |
+
"model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
|
261 |
+
"model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
262 |
+
"model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
263 |
+
"model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
264 |
+
"model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
|
265 |
+
"model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
266 |
+
"model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
267 |
+
"model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
268 |
+
"model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
269 |
+
"model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
270 |
+
"model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
271 |
+
"model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
272 |
+
"model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
|
273 |
+
"model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
274 |
+
"model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
275 |
+
"model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
276 |
+
"model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
|
277 |
+
"model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
278 |
+
"model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
279 |
+
"model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
280 |
+
"model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
281 |
+
"model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
282 |
+
"model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
283 |
+
"model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
284 |
+
"model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
|
285 |
+
"model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
286 |
+
"model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
287 |
+
"model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
288 |
+
"model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
|
289 |
+
"model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
290 |
+
"model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
291 |
+
"model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
292 |
+
"model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
293 |
+
"model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
294 |
+
"model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
295 |
+
"model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
296 |
+
"model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
|
297 |
+
"model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
298 |
+
"model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
299 |
+
"model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
300 |
+
"model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
|
301 |
+
"model.layers.6.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
302 |
+
"model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
303 |
+
"model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
304 |
+
"model.layers.6.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
305 |
+
"model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
306 |
+
"model.layers.6.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
307 |
+
"model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
308 |
+
"model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
|
309 |
+
"model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
|
310 |
+
"model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
|
311 |
+
"model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
|
312 |
+
"model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
|
313 |
+
"model.layers.7.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
314 |
+
"model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
315 |
+
"model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
316 |
+
"model.layers.7.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
317 |
+
"model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
318 |
+
"model.layers.7.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
319 |
+
"model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
320 |
+
"model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
321 |
+
"model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
322 |
+
"model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
323 |
+
"model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
324 |
+
"model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
325 |
+
"model.layers.8.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
|
326 |
+
"model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
|
327 |
+
"model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
|
328 |
+
"model.layers.8.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
|
329 |
+
"model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
|
330 |
+
"model.layers.8.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
|
331 |
+
"model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
|
332 |
+
"model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
|
333 |
+
"model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
|
334 |
+
"model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
|
335 |
+
"model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
|
336 |
+
"model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
|
337 |
+
"model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
|
338 |
+
"model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
|
339 |
+
"model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
|
340 |
+
"model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
|
341 |
+
"model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
|
342 |
+
"model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
|
343 |
+
"model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
|
344 |
+
"model.norm.weight": "model-00003-of-00004.safetensors"
|
345 |
+
}
|
346 |
+
}
|
special_tokens_map.json
ADDED
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"additional_special_tokens": [
|
3 |
+
"<|im_start|>",
|
4 |
+
"<|im_end|>",
|
5 |
+
"<|object_ref_start|>",
|
6 |
+
"<|object_ref_end|>",
|
7 |
+
"<|box_start|>",
|
8 |
+
"<|box_end|>",
|
9 |
+
"<|quad_start|>",
|
10 |
+
"<|quad_end|>",
|
11 |
+
"<|vision_start|>",
|
12 |
+
"<|vision_end|>",
|
13 |
+
"<|vision_pad|>",
|
14 |
+
"<|image_pad|>",
|
15 |
+
"<|video_pad|>"
|
16 |
+
],
|
17 |
+
"eos_token": {
|
18 |
+
"content": "<|im_end|>",
|
19 |
+
"lstrip": false,
|
20 |
+
"normalized": false,
|
21 |
+
"rstrip": false,
|
22 |
+
"single_word": false
|
23 |
+
},
|
24 |
+
"pad_token": {
|
25 |
+
"content": "<|endoftext|>",
|
26 |
+
"lstrip": false,
|
27 |
+
"normalized": false,
|
28 |
+
"rstrip": false,
|
29 |
+
"single_word": false
|
30 |
+
}
|
31 |
+
}
|
tokenizer.json
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
|
3 |
+
size 11421896
|
tokenizer_config.json
ADDED
@@ -0,0 +1,209 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"add_bos_token": false,
|
3 |
+
"add_prefix_space": false,
|
4 |
+
"added_tokens_decoder": {
|
5 |
+
"151643": {
|
6 |
+
"content": "<|endoftext|>",
|
7 |
+
"lstrip": false,
|
8 |
+
"normalized": false,
|
9 |
+
"rstrip": false,
|
10 |
+
"single_word": false,
|
11 |
+
"special": true
|
12 |
+
},
|
13 |
+
"151644": {
|
14 |
+
"content": "<|im_start|>",
|
15 |
+
"lstrip": false,
|
16 |
+
"normalized": false,
|
17 |
+
"rstrip": false,
|
18 |
+
"single_word": false,
|
19 |
+
"special": true
|
20 |
+
},
|
21 |
+
"151645": {
|
22 |
+
"content": "<|im_end|>",
|
23 |
+
"lstrip": false,
|
24 |
+
"normalized": false,
|
25 |
+
"rstrip": false,
|
26 |
+
"single_word": false,
|
27 |
+
"special": true
|
28 |
+
},
|
29 |
+
"151646": {
|
30 |
+
"content": "<|object_ref_start|>",
|
31 |
+
"lstrip": false,
|
32 |
+
"normalized": false,
|
33 |
+
"rstrip": false,
|
34 |
+
"single_word": false,
|
35 |
+
"special": true
|
36 |
+
},
|
37 |
+
"151647": {
|
38 |
+
"content": "<|object_ref_end|>",
|
39 |
+
"lstrip": false,
|
40 |
+
"normalized": false,
|
41 |
+
"rstrip": false,
|
42 |
+
"single_word": false,
|
43 |
+
"special": true
|
44 |
+
},
|
45 |
+
"151648": {
|
46 |
+
"content": "<|box_start|>",
|
47 |
+
"lstrip": false,
|
48 |
+
"normalized": false,
|
49 |
+
"rstrip": false,
|
50 |
+
"single_word": false,
|
51 |
+
"special": true
|
52 |
+
},
|
53 |
+
"151649": {
|
54 |
+
"content": "<|box_end|>",
|
55 |
+
"lstrip": false,
|
56 |
+
"normalized": false,
|
57 |
+
"rstrip": false,
|
58 |
+
"single_word": false,
|
59 |
+
"special": true
|
60 |
+
},
|
61 |
+
"151650": {
|
62 |
+
"content": "<|quad_start|>",
|
63 |
+
"lstrip": false,
|
64 |
+
"normalized": false,
|
65 |
+
"rstrip": false,
|
66 |
+
"single_word": false,
|
67 |
+
"special": true
|
68 |
+
},
|
69 |
+
"151651": {
|
70 |
+
"content": "<|quad_end|>",
|
71 |
+
"lstrip": false,
|
72 |
+
"normalized": false,
|
73 |
+
"rstrip": false,
|
74 |
+
"single_word": false,
|
75 |
+
"special": true
|
76 |
+
},
|
77 |
+
"151652": {
|
78 |
+
"content": "<|vision_start|>",
|
79 |
+
"lstrip": false,
|
80 |
+
"normalized": false,
|
81 |
+
"rstrip": false,
|
82 |
+
"single_word": false,
|
83 |
+
"special": true
|
84 |
+
},
|
85 |
+
"151653": {
|
86 |
+
"content": "<|vision_end|>",
|
87 |
+
"lstrip": false,
|
88 |
+
"normalized": false,
|
89 |
+
"rstrip": false,
|
90 |
+
"single_word": false,
|
91 |
+
"special": true
|
92 |
+
},
|
93 |
+
"151654": {
|
94 |
+
"content": "<|vision_pad|>",
|
95 |
+
"lstrip": false,
|
96 |
+
"normalized": false,
|
97 |
+
"rstrip": false,
|
98 |
+
"single_word": false,
|
99 |
+
"special": true
|
100 |
+
},
|
101 |
+
"151655": {
|
102 |
+
"content": "<|image_pad|>",
|
103 |
+
"lstrip": false,
|
104 |
+
"normalized": false,
|
105 |
+
"rstrip": false,
|
106 |
+
"single_word": false,
|
107 |
+
"special": true
|
108 |
+
},
|
109 |
+
"151656": {
|
110 |
+
"content": "<|video_pad|>",
|
111 |
+
"lstrip": false,
|
112 |
+
"normalized": false,
|
113 |
+
"rstrip": false,
|
114 |
+
"single_word": false,
|
115 |
+
"special": true
|
116 |
+
},
|
117 |
+
"151657": {
|
118 |
+
"content": "<tool_call>",
|
119 |
+
"lstrip": false,
|
120 |
+
"normalized": false,
|
121 |
+
"rstrip": false,
|
122 |
+
"single_word": false,
|
123 |
+
"special": false
|
124 |
+
},
|
125 |
+
"151658": {
|
126 |
+
"content": "</tool_call>",
|
127 |
+
"lstrip": false,
|
128 |
+
"normalized": false,
|
129 |
+
"rstrip": false,
|
130 |
+
"single_word": false,
|
131 |
+
"special": false
|
132 |
+
},
|
133 |
+
"151659": {
|
134 |
+
"content": "<|fim_prefix|>",
|
135 |
+
"lstrip": false,
|
136 |
+
"normalized": false,
|
137 |
+
"rstrip": false,
|
138 |
+
"single_word": false,
|
139 |
+
"special": false
|
140 |
+
},
|
141 |
+
"151660": {
|
142 |
+
"content": "<|fim_middle|>",
|
143 |
+
"lstrip": false,
|
144 |
+
"normalized": false,
|
145 |
+
"rstrip": false,
|
146 |
+
"single_word": false,
|
147 |
+
"special": false
|
148 |
+
},
|
149 |
+
"151661": {
|
150 |
+
"content": "<|fim_suffix|>",
|
151 |
+
"lstrip": false,
|
152 |
+
"normalized": false,
|
153 |
+
"rstrip": false,
|
154 |
+
"single_word": false,
|
155 |
+
"special": false
|
156 |
+
},
|
157 |
+
"151662": {
|
158 |
+
"content": "<|fim_pad|>",
|
159 |
+
"lstrip": false,
|
160 |
+
"normalized": false,
|
161 |
+
"rstrip": false,
|
162 |
+
"single_word": false,
|
163 |
+
"special": false
|
164 |
+
},
|
165 |
+
"151663": {
|
166 |
+
"content": "<|repo_name|>",
|
167 |
+
"lstrip": false,
|
168 |
+
"normalized": false,
|
169 |
+
"rstrip": false,
|
170 |
+
"single_word": false,
|
171 |
+
"special": false
|
172 |
+
},
|
173 |
+
"151664": {
|
174 |
+
"content": "<|file_sep|>",
|
175 |
+
"lstrip": false,
|
176 |
+
"normalized": false,
|
177 |
+
"rstrip": false,
|
178 |
+
"single_word": false,
|
179 |
+
"special": false
|
180 |
+
}
|
181 |
+
},
|
182 |
+
"additional_special_tokens": [
|
183 |
+
"<|im_start|>",
|
184 |
+
"<|im_end|>",
|
185 |
+
"<|object_ref_start|>",
|
186 |
+
"<|object_ref_end|>",
|
187 |
+
"<|box_start|>",
|
188 |
+
"<|box_end|>",
|
189 |
+
"<|quad_start|>",
|
190 |
+
"<|quad_end|>",
|
191 |
+
"<|vision_start|>",
|
192 |
+
"<|vision_end|>",
|
193 |
+
"<|vision_pad|>",
|
194 |
+
"<|image_pad|>",
|
195 |
+
"<|video_pad|>"
|
196 |
+
],
|
197 |
+
"bos_token": null,
|
198 |
+
"chat_template": "{%- if tools %}\n {{- '<|im_start|>system\\n' }}\n {%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n {%- else %}\n {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}\n {%- endif %}\n {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n {%- if messages[0]['role'] == 'system' %}\n {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n {%- else %}\n {{- '<|im_start|>system\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\n' }}\n {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role }}\n {%- if message.content %}\n {{- '\\n' + message.content }}\n {%- endif %}\n {%- for tool_call in message.tool_calls %}\n {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '\\n<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n {{- '<|im_start|>user' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- message.content }}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>\\n' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
|
199 |
+
"clean_up_tokenization_spaces": false,
|
200 |
+
"eos_token": "<|im_end|>",
|
201 |
+
"errors": "replace",
|
202 |
+
"extra_special_tokens": {},
|
203 |
+
"model_max_length": 131072,
|
204 |
+
"pad_token": "<|endoftext|>",
|
205 |
+
"padding_side": "right",
|
206 |
+
"split_special_tokens": false,
|
207 |
+
"tokenizer_class": "Qwen2Tokenizer",
|
208 |
+
"unk_token": null
|
209 |
+
}
|
trainer_log.jsonl
ADDED
@@ -0,0 +1,201 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{"current_steps": 1, "total_steps": 594, "loss": 1.7242, "lr": 0.0, "epoch": 0.005042016806722689, "percentage": 0.17, "elapsed_time": "0:02:29", "remaining_time": "1 day, 0:36:45"}
|
2 |
+
{"current_steps": 2, "total_steps": 594, "loss": 1.9603, "lr": 1.6666666666666668e-07, "epoch": 0.010084033613445379, "percentage": 0.34, "elapsed_time": "0:04:31", "remaining_time": "22:17:37"}
|
3 |
+
{"current_steps": 3, "total_steps": 594, "loss": 1.7815, "lr": 3.3333333333333335e-07, "epoch": 0.015126050420168067, "percentage": 0.51, "elapsed_time": "0:06:58", "remaining_time": "22:55:38"}
|
4 |
+
{"current_steps": 4, "total_steps": 594, "loss": 1.8671, "lr": 5.000000000000001e-07, "epoch": 0.020168067226890758, "percentage": 0.67, "elapsed_time": "0:08:46", "remaining_time": "21:34:03"}
|
5 |
+
{"current_steps": 5, "total_steps": 594, "loss": 1.8868, "lr": 6.666666666666667e-07, "epoch": 0.025210084033613446, "percentage": 0.84, "elapsed_time": "0:10:22", "remaining_time": "20:22:49"}
|
6 |
+
{"current_steps": 6, "total_steps": 594, "loss": 1.8821, "lr": 8.333333333333333e-07, "epoch": 0.030252100840336135, "percentage": 1.01, "elapsed_time": "0:12:15", "remaining_time": "20:00:57"}
|
7 |
+
{"current_steps": 7, "total_steps": 594, "loss": 1.8398, "lr": 1.0000000000000002e-06, "epoch": 0.03529411764705882, "percentage": 1.18, "elapsed_time": "0:14:02", "remaining_time": "19:38:06"}
|
8 |
+
{"current_steps": 8, "total_steps": 594, "loss": 1.7522, "lr": 1.1666666666666668e-06, "epoch": 0.040336134453781515, "percentage": 1.35, "elapsed_time": "0:15:25", "remaining_time": "18:49:51"}
|
9 |
+
{"current_steps": 9, "total_steps": 594, "loss": 1.7879, "lr": 1.3333333333333334e-06, "epoch": 0.0453781512605042, "percentage": 1.52, "elapsed_time": "0:17:19", "remaining_time": "18:46:00"}
|
10 |
+
{"current_steps": 10, "total_steps": 594, "loss": 1.8949, "lr": 1.5e-06, "epoch": 0.05042016806722689, "percentage": 1.68, "elapsed_time": "0:18:55", "remaining_time": "18:24:52"}
|
11 |
+
{"current_steps": 11, "total_steps": 594, "loss": 1.7568, "lr": 1.6666666666666667e-06, "epoch": 0.05546218487394958, "percentage": 1.85, "elapsed_time": "0:21:26", "remaining_time": "18:56:40"}
|
12 |
+
{"current_steps": 12, "total_steps": 594, "loss": 1.6243, "lr": 1.8333333333333333e-06, "epoch": 0.06050420168067227, "percentage": 2.02, "elapsed_time": "0:23:02", "remaining_time": "18:37:32"}
|
13 |
+
{"current_steps": 13, "total_steps": 594, "loss": 1.5973, "lr": 2.0000000000000003e-06, "epoch": 0.06554621848739496, "percentage": 2.19, "elapsed_time": "0:24:44", "remaining_time": "18:25:30"}
|
14 |
+
{"current_steps": 14, "total_steps": 594, "loss": 1.3774, "lr": 2.166666666666667e-06, "epoch": 0.07058823529411765, "percentage": 2.36, "elapsed_time": "0:26:20", "remaining_time": "18:11:03"}
|
15 |
+
{"current_steps": 15, "total_steps": 594, "loss": 1.5953, "lr": 2.3333333333333336e-06, "epoch": 0.07563025210084033, "percentage": 2.53, "elapsed_time": "0:28:19", "remaining_time": "18:13:38"}
|
16 |
+
{"current_steps": 16, "total_steps": 594, "loss": 1.3746, "lr": 2.5e-06, "epoch": 0.08067226890756303, "percentage": 2.69, "elapsed_time": "0:30:13", "remaining_time": "18:12:02"}
|
17 |
+
{"current_steps": 17, "total_steps": 594, "loss": 1.4698, "lr": 2.666666666666667e-06, "epoch": 0.08571428571428572, "percentage": 2.86, "elapsed_time": "0:32:00", "remaining_time": "18:06:37"}
|
18 |
+
{"current_steps": 18, "total_steps": 594, "loss": 1.4229, "lr": 2.8333333333333335e-06, "epoch": 0.0907563025210084, "percentage": 3.03, "elapsed_time": "0:33:53", "remaining_time": "18:04:34"}
|
19 |
+
{"current_steps": 19, "total_steps": 594, "loss": 1.3948, "lr": 3e-06, "epoch": 0.0957983193277311, "percentage": 3.2, "elapsed_time": "0:35:30", "remaining_time": "17:54:25"}
|
20 |
+
{"current_steps": 20, "total_steps": 594, "loss": 1.3286, "lr": 3.1666666666666667e-06, "epoch": 0.10084033613445378, "percentage": 3.37, "elapsed_time": "0:37:21", "remaining_time": "17:52:18"}
|
21 |
+
{"current_steps": 21, "total_steps": 594, "loss": 1.2574, "lr": 3.3333333333333333e-06, "epoch": 0.10588235294117647, "percentage": 3.54, "elapsed_time": "0:39:17", "remaining_time": "17:52:17"}
|
22 |
+
{"current_steps": 22, "total_steps": 594, "loss": 1.2994, "lr": 3.5e-06, "epoch": 0.11092436974789915, "percentage": 3.7, "elapsed_time": "0:41:04", "remaining_time": "17:48:09"}
|
23 |
+
{"current_steps": 23, "total_steps": 594, "loss": 1.271, "lr": 3.6666666666666666e-06, "epoch": 0.11596638655462185, "percentage": 3.87, "elapsed_time": "0:43:01", "remaining_time": "17:48:08"}
|
24 |
+
{"current_steps": 24, "total_steps": 594, "loss": 1.239, "lr": 3.833333333333334e-06, "epoch": 0.12100840336134454, "percentage": 4.04, "elapsed_time": "0:44:34", "remaining_time": "17:38:49"}
|
25 |
+
{"current_steps": 25, "total_steps": 594, "loss": 1.2632, "lr": 4.000000000000001e-06, "epoch": 0.12605042016806722, "percentage": 4.21, "elapsed_time": "0:46:44", "remaining_time": "17:43:43"}
|
26 |
+
{"current_steps": 26, "total_steps": 594, "loss": 1.2494, "lr": 4.166666666666667e-06, "epoch": 0.13109243697478992, "percentage": 4.38, "elapsed_time": "0:48:33", "remaining_time": "17:40:42"}
|
27 |
+
{"current_steps": 27, "total_steps": 594, "loss": 1.1772, "lr": 4.333333333333334e-06, "epoch": 0.1361344537815126, "percentage": 4.55, "elapsed_time": "0:50:17", "remaining_time": "17:36:00"}
|
28 |
+
{"current_steps": 28, "total_steps": 594, "loss": 1.3323, "lr": 4.5e-06, "epoch": 0.1411764705882353, "percentage": 4.71, "elapsed_time": "0:51:56", "remaining_time": "17:30:03"}
|
29 |
+
{"current_steps": 29, "total_steps": 594, "loss": 1.123, "lr": 4.666666666666667e-06, "epoch": 0.146218487394958, "percentage": 4.88, "elapsed_time": "0:53:55", "remaining_time": "17:30:30"}
|
30 |
+
{"current_steps": 30, "total_steps": 594, "loss": 1.1116, "lr": 4.833333333333333e-06, "epoch": 0.15126050420168066, "percentage": 5.05, "elapsed_time": "0:55:42", "remaining_time": "17:27:24"}
|
31 |
+
{"current_steps": 31, "total_steps": 594, "loss": 1.2038, "lr": 5e-06, "epoch": 0.15630252100840336, "percentage": 5.22, "elapsed_time": "0:57:35", "remaining_time": "17:25:54"}
|
32 |
+
{"current_steps": 32, "total_steps": 594, "loss": 1.2171, "lr": 5.1666666666666675e-06, "epoch": 0.16134453781512606, "percentage": 5.39, "elapsed_time": "0:59:16", "remaining_time": "17:21:05"}
|
33 |
+
{"current_steps": 33, "total_steps": 594, "loss": 1.0193, "lr": 5.333333333333334e-06, "epoch": 0.16638655462184873, "percentage": 5.56, "elapsed_time": "1:00:25", "remaining_time": "17:07:14"}
|
34 |
+
{"current_steps": 34, "total_steps": 594, "loss": 1.0761, "lr": 5.500000000000001e-06, "epoch": 0.17142857142857143, "percentage": 5.72, "elapsed_time": "1:02:37", "remaining_time": "17:11:24"}
|
35 |
+
{"current_steps": 35, "total_steps": 594, "loss": 1.1126, "lr": 5.666666666666667e-06, "epoch": 0.17647058823529413, "percentage": 5.89, "elapsed_time": "1:05:00", "remaining_time": "17:18:18"}
|
36 |
+
{"current_steps": 36, "total_steps": 594, "loss": 1.1203, "lr": 5.833333333333334e-06, "epoch": 0.1815126050420168, "percentage": 6.06, "elapsed_time": "1:06:50", "remaining_time": "17:15:58"}
|
37 |
+
{"current_steps": 37, "total_steps": 594, "loss": 1.0293, "lr": 6e-06, "epoch": 0.1865546218487395, "percentage": 6.23, "elapsed_time": "1:08:42", "remaining_time": "17:14:15"}
|
38 |
+
{"current_steps": 38, "total_steps": 594, "loss": 0.995, "lr": 6.166666666666667e-06, "epoch": 0.1915966386554622, "percentage": 6.4, "elapsed_time": "1:10:15", "remaining_time": "17:08:03"}
|
39 |
+
{"current_steps": 39, "total_steps": 594, "loss": 1.1051, "lr": 6.333333333333333e-06, "epoch": 0.19663865546218487, "percentage": 6.57, "elapsed_time": "1:11:38", "remaining_time": "16:59:36"}
|
40 |
+
{"current_steps": 40, "total_steps": 594, "loss": 1.0817, "lr": 6.5000000000000004e-06, "epoch": 0.20168067226890757, "percentage": 6.73, "elapsed_time": "1:13:33", "remaining_time": "16:58:46"}
|
41 |
+
{"current_steps": 41, "total_steps": 594, "loss": 0.9949, "lr": 6.666666666666667e-06, "epoch": 0.20672268907563024, "percentage": 6.9, "elapsed_time": "1:15:15", "remaining_time": "16:55:07"}
|
42 |
+
{"current_steps": 42, "total_steps": 594, "loss": 1.0369, "lr": 6.833333333333334e-06, "epoch": 0.21176470588235294, "percentage": 7.07, "elapsed_time": "1:17:12", "remaining_time": "16:54:37"}
|
43 |
+
{"current_steps": 43, "total_steps": 594, "loss": 1.0126, "lr": 7e-06, "epoch": 0.21680672268907564, "percentage": 7.24, "elapsed_time": "1:18:50", "remaining_time": "16:50:18"}
|
44 |
+
{"current_steps": 44, "total_steps": 594, "loss": 0.9829, "lr": 7.166666666666667e-06, "epoch": 0.2218487394957983, "percentage": 7.41, "elapsed_time": "1:20:25", "remaining_time": "16:45:17"}
|
45 |
+
{"current_steps": 45, "total_steps": 594, "loss": 0.9432, "lr": 7.333333333333333e-06, "epoch": 0.226890756302521, "percentage": 7.58, "elapsed_time": "1:22:17", "remaining_time": "16:44:00"}
|
46 |
+
{"current_steps": 46, "total_steps": 594, "loss": 0.9518, "lr": 7.500000000000001e-06, "epoch": 0.2319327731092437, "percentage": 7.74, "elapsed_time": "1:23:40", "remaining_time": "16:36:46"}
|
47 |
+
{"current_steps": 47, "total_steps": 594, "loss": 1.0235, "lr": 7.666666666666667e-06, "epoch": 0.23697478991596638, "percentage": 7.91, "elapsed_time": "1:24:54", "remaining_time": "16:28:07"}
|
48 |
+
{"current_steps": 48, "total_steps": 594, "loss": 0.9567, "lr": 7.833333333333333e-06, "epoch": 0.24201680672268908, "percentage": 8.08, "elapsed_time": "1:27:05", "remaining_time": "16:30:34"}
|
49 |
+
{"current_steps": 49, "total_steps": 594, "loss": 1.0417, "lr": 8.000000000000001e-06, "epoch": 0.24705882352941178, "percentage": 8.25, "elapsed_time": "1:28:45", "remaining_time": "16:27:13"}
|
50 |
+
{"current_steps": 50, "total_steps": 594, "loss": 1.0347, "lr": 8.166666666666668e-06, "epoch": 0.25210084033613445, "percentage": 8.42, "elapsed_time": "1:30:34", "remaining_time": "16:25:23"}
|
51 |
+
{"current_steps": 51, "total_steps": 594, "loss": 0.9524, "lr": 8.333333333333334e-06, "epoch": 0.2571428571428571, "percentage": 8.59, "elapsed_time": "1:32:28", "remaining_time": "16:24:31"}
|
52 |
+
{"current_steps": 52, "total_steps": 594, "loss": 1.0684, "lr": 8.5e-06, "epoch": 0.26218487394957984, "percentage": 8.75, "elapsed_time": "1:33:51", "remaining_time": "16:18:21"}
|
53 |
+
{"current_steps": 53, "total_steps": 594, "loss": 0.9501, "lr": 8.666666666666668e-06, "epoch": 0.2672268907563025, "percentage": 8.92, "elapsed_time": "1:35:17", "remaining_time": "16:12:38"}
|
54 |
+
{"current_steps": 54, "total_steps": 594, "loss": 0.9452, "lr": 8.833333333333334e-06, "epoch": 0.2722689075630252, "percentage": 9.09, "elapsed_time": "1:37:33", "remaining_time": "16:15:33"}
|
55 |
+
{"current_steps": 55, "total_steps": 594, "loss": 0.9243, "lr": 9e-06, "epoch": 0.2773109243697479, "percentage": 9.26, "elapsed_time": "1:39:03", "remaining_time": "16:10:48"}
|
56 |
+
{"current_steps": 56, "total_steps": 594, "loss": 0.9577, "lr": 9.166666666666666e-06, "epoch": 0.2823529411764706, "percentage": 9.43, "elapsed_time": "1:40:24", "remaining_time": "16:04:35"}
|
57 |
+
{"current_steps": 57, "total_steps": 594, "loss": 0.9216, "lr": 9.333333333333334e-06, "epoch": 0.28739495798319326, "percentage": 9.6, "elapsed_time": "1:41:50", "remaining_time": "15:59:29"}
|
58 |
+
{"current_steps": 58, "total_steps": 594, "loss": 0.986, "lr": 9.5e-06, "epoch": 0.292436974789916, "percentage": 9.76, "elapsed_time": "1:43:54", "remaining_time": "16:00:10"}
|
59 |
+
{"current_steps": 59, "total_steps": 594, "loss": 1.0163, "lr": 9.666666666666667e-06, "epoch": 0.29747899159663865, "percentage": 9.93, "elapsed_time": "1:45:25", "remaining_time": "15:56:01"}
|
60 |
+
{"current_steps": 60, "total_steps": 594, "loss": 0.9071, "lr": 9.833333333333333e-06, "epoch": 0.3025210084033613, "percentage": 10.1, "elapsed_time": "1:46:46", "remaining_time": "15:50:21"}
|
61 |
+
{"current_steps": 61, "total_steps": 594, "loss": 1.0532, "lr": 1e-05, "epoch": 0.30756302521008405, "percentage": 10.27, "elapsed_time": "1:48:57", "remaining_time": "15:52:03"}
|
62 |
+
{"current_steps": 62, "total_steps": 594, "loss": 0.9497, "lr": 9.999913472135126e-06, "epoch": 0.3126050420168067, "percentage": 10.44, "elapsed_time": "1:50:33", "remaining_time": "15:48:43"}
|
63 |
+
{"current_steps": 63, "total_steps": 594, "loss": 0.9656, "lr": 9.99965389153533e-06, "epoch": 0.3176470588235294, "percentage": 10.61, "elapsed_time": "1:51:54", "remaining_time": "15:43:17"}
|
64 |
+
{"current_steps": 64, "total_steps": 594, "loss": 0.8563, "lr": 9.999221267184993e-06, "epoch": 0.3226890756302521, "percentage": 10.77, "elapsed_time": "1:53:50", "remaining_time": "15:42:42"}
|
65 |
+
{"current_steps": 65, "total_steps": 594, "loss": 0.9743, "lr": 9.998615614057743e-06, "epoch": 0.3277310924369748, "percentage": 10.94, "elapsed_time": "1:55:43", "remaining_time": "15:41:47"}
|
66 |
+
{"current_steps": 66, "total_steps": 594, "loss": 0.8256, "lr": 9.997836953115927e-06, "epoch": 0.33277310924369746, "percentage": 11.11, "elapsed_time": "1:57:27", "remaining_time": "15:39:38"}
|
67 |
+
{"current_steps": 67, "total_steps": 594, "loss": 0.9112, "lr": 9.996885311309892e-06, "epoch": 0.3378151260504202, "percentage": 11.28, "elapsed_time": "1:58:48", "remaining_time": "15:34:33"}
|
68 |
+
{"current_steps": 68, "total_steps": 594, "loss": 1.0031, "lr": 9.995760721577053e-06, "epoch": 0.34285714285714286, "percentage": 11.45, "elapsed_time": "2:00:50", "remaining_time": "15:34:42"}
|
69 |
+
{"current_steps": 69, "total_steps": 594, "loss": 0.8777, "lr": 9.994463222840748e-06, "epoch": 0.34789915966386553, "percentage": 11.62, "elapsed_time": "2:02:23", "remaining_time": "15:31:13"}
|
70 |
+
{"current_steps": 70, "total_steps": 594, "loss": 0.9503, "lr": 9.992992860008893e-06, "epoch": 0.35294117647058826, "percentage": 11.78, "elapsed_time": "2:04:02", "remaining_time": "15:28:35"}
|
71 |
+
{"current_steps": 71, "total_steps": 594, "loss": 0.9707, "lr": 9.991349683972435e-06, "epoch": 0.35798319327731093, "percentage": 11.95, "elapsed_time": "2:05:50", "remaining_time": "15:26:55"}
|
72 |
+
{"current_steps": 72, "total_steps": 594, "loss": 0.8987, "lr": 9.989533751603578e-06, "epoch": 0.3630252100840336, "percentage": 12.12, "elapsed_time": "2:07:47", "remaining_time": "15:26:28"}
|
73 |
+
{"current_steps": 73, "total_steps": 594, "loss": 0.9614, "lr": 9.987545125753818e-06, "epoch": 0.3680672268907563, "percentage": 12.29, "elapsed_time": "2:09:03", "remaining_time": "15:21:06"}
|
74 |
+
{"current_steps": 74, "total_steps": 594, "loss": 0.9101, "lr": 9.985383875251783e-06, "epoch": 0.373109243697479, "percentage": 12.46, "elapsed_time": "2:11:00", "remaining_time": "15:20:35"}
|
75 |
+
{"current_steps": 75, "total_steps": 594, "loss": 0.8901, "lr": 9.983050074900824e-06, "epoch": 0.37815126050420167, "percentage": 12.63, "elapsed_time": "2:12:58", "remaining_time": "15:20:11"}
|
76 |
+
{"current_steps": 76, "total_steps": 594, "loss": 0.9305, "lr": 9.980543805476447e-06, "epoch": 0.3831932773109244, "percentage": 12.79, "elapsed_time": "2:14:22", "remaining_time": "15:15:52"}
|
77 |
+
{"current_steps": 77, "total_steps": 594, "loss": 0.9145, "lr": 9.977865153723508e-06, "epoch": 0.38823529411764707, "percentage": 12.96, "elapsed_time": "2:15:59", "remaining_time": "15:13:02"}
|
78 |
+
{"current_steps": 78, "total_steps": 594, "loss": 0.9386, "lr": 9.975014212353212e-06, "epoch": 0.39327731092436974, "percentage": 13.13, "elapsed_time": "2:17:38", "remaining_time": "15:10:34"}
|
79 |
+
{"current_steps": 79, "total_steps": 594, "loss": 0.9072, "lr": 9.971991080039912e-06, "epoch": 0.3983193277310924, "percentage": 13.3, "elapsed_time": "2:19:49", "remaining_time": "15:11:30"}
|
80 |
+
{"current_steps": 80, "total_steps": 594, "loss": 0.8538, "lr": 9.968795861417676e-06, "epoch": 0.40336134453781514, "percentage": 13.47, "elapsed_time": "2:21:54", "remaining_time": "15:11:45"}
|
81 |
+
{"current_steps": 81, "total_steps": 594, "loss": 0.8625, "lr": 9.965428667076687e-06, "epoch": 0.4084033613445378, "percentage": 13.64, "elapsed_time": "2:23:32", "remaining_time": "15:09:03"}
|
82 |
+
{"current_steps": 82, "total_steps": 594, "loss": 0.8002, "lr": 9.961889613559396e-06, "epoch": 0.4134453781512605, "percentage": 13.8, "elapsed_time": "2:25:29", "remaining_time": "15:08:27"}
|
83 |
+
{"current_steps": 83, "total_steps": 594, "loss": 0.9563, "lr": 9.958178823356503e-06, "epoch": 0.4184873949579832, "percentage": 13.97, "elapsed_time": "2:27:23", "remaining_time": "15:07:27"}
|
84 |
+
{"current_steps": 84, "total_steps": 594, "loss": 0.9009, "lr": 9.954296424902709e-06, "epoch": 0.4235294117647059, "percentage": 14.14, "elapsed_time": "2:30:03", "remaining_time": "15:11:03"}
|
85 |
+
{"current_steps": 85, "total_steps": 594, "loss": 0.8489, "lr": 9.950242552572272e-06, "epoch": 0.42857142857142855, "percentage": 14.31, "elapsed_time": "2:31:29", "remaining_time": "15:07:11"}
|
86 |
+
{"current_steps": 86, "total_steps": 594, "loss": 0.847, "lr": 9.946017346674362e-06, "epoch": 0.4336134453781513, "percentage": 14.48, "elapsed_time": "2:33:36", "remaining_time": "15:07:20"}
|
87 |
+
{"current_steps": 87, "total_steps": 594, "loss": 0.9382, "lr": 9.941620953448195e-06, "epoch": 0.43865546218487395, "percentage": 14.65, "elapsed_time": "2:35:30", "remaining_time": "15:06:16"}
|
88 |
+
{"current_steps": 88, "total_steps": 594, "loss": 0.8991, "lr": 9.937053525057977e-06, "epoch": 0.4436974789915966, "percentage": 14.81, "elapsed_time": "2:37:53", "remaining_time": "15:07:51"}
|
89 |
+
{"current_steps": 89, "total_steps": 594, "loss": 0.8611, "lr": 9.932315219587641e-06, "epoch": 0.44873949579831934, "percentage": 14.98, "elapsed_time": "2:39:50", "remaining_time": "15:06:56"}
|
90 |
+
{"current_steps": 90, "total_steps": 594, "loss": 0.8254, "lr": 9.927406201035368e-06, "epoch": 0.453781512605042, "percentage": 15.15, "elapsed_time": "2:41:48", "remaining_time": "15:06:05"}
|
91 |
+
{"current_steps": 91, "total_steps": 594, "loss": 0.8186, "lr": 9.922326639307918e-06, "epoch": 0.4588235294117647, "percentage": 15.32, "elapsed_time": "2:44:02", "remaining_time": "15:06:41"}
|
92 |
+
{"current_steps": 92, "total_steps": 594, "loss": 0.8217, "lr": 9.917076710214739e-06, "epoch": 0.4638655462184874, "percentage": 15.49, "elapsed_time": "2:45:34", "remaining_time": "15:03:26"}
|
93 |
+
{"current_steps": 93, "total_steps": 594, "loss": 0.9606, "lr": 9.911656595461899e-06, "epoch": 0.4689075630252101, "percentage": 15.66, "elapsed_time": "2:47:18", "remaining_time": "15:01:20"}
|
94 |
+
{"current_steps": 94, "total_steps": 594, "loss": 0.8865, "lr": 9.906066482645774e-06, "epoch": 0.47394957983193275, "percentage": 15.82, "elapsed_time": "2:49:39", "remaining_time": "15:02:26"}
|
95 |
+
{"current_steps": 95, "total_steps": 594, "loss": 0.8608, "lr": 9.900306565246579e-06, "epoch": 0.4789915966386555, "percentage": 15.99, "elapsed_time": "2:51:59", "remaining_time": "15:03:22"}
|
96 |
+
{"current_steps": 96, "total_steps": 594, "loss": 0.8476, "lr": 9.894377042621654e-06, "epoch": 0.48403361344537815, "percentage": 16.16, "elapsed_time": "2:53:55", "remaining_time": "15:02:13"}
|
97 |
+
{"current_steps": 97, "total_steps": 594, "loss": 0.898, "lr": 9.888278119998573e-06, "epoch": 0.4890756302521008, "percentage": 16.33, "elapsed_time": "2:55:55", "remaining_time": "15:01:22"}
|
98 |
+
{"current_steps": 98, "total_steps": 594, "loss": 0.9482, "lr": 9.882010008468038e-06, "epoch": 0.49411764705882355, "percentage": 16.5, "elapsed_time": "2:58:01", "remaining_time": "15:01:03"}
|
99 |
+
{"current_steps": 99, "total_steps": 594, "loss": 0.8932, "lr": 9.875572924976568e-06, "epoch": 0.4991596638655462, "percentage": 16.67, "elapsed_time": "2:59:43", "remaining_time": "14:58:37"}
|
100 |
+
{"current_steps": 100, "total_steps": 594, "loss": 0.9113, "lr": 9.868967092319003e-06, "epoch": 0.5042016806722689, "percentage": 16.84, "elapsed_time": "3:01:49", "remaining_time": "14:58:14"}
|
101 |
+
{"current_steps": 101, "total_steps": 594, "loss": 0.8373, "lr": 9.86219273913078e-06, "epoch": 0.5092436974789916, "percentage": 17.0, "elapsed_time": "3:03:39", "remaining_time": "14:56:29"}
|
102 |
+
{"current_steps": 102, "total_steps": 594, "loss": 0.82, "lr": 9.855250099880026e-06, "epoch": 0.5142857142857142, "percentage": 17.17, "elapsed_time": "3:05:29", "remaining_time": "14:54:42"}
|
103 |
+
{"current_steps": 103, "total_steps": 594, "loss": 0.9269, "lr": 9.848139414859441e-06, "epoch": 0.519327731092437, "percentage": 17.34, "elapsed_time": "3:07:23", "remaining_time": "14:53:15"}
|
104 |
+
{"current_steps": 104, "total_steps": 594, "loss": 0.8917, "lr": 9.840860930177984e-06, "epoch": 0.5243697478991597, "percentage": 17.51, "elapsed_time": "3:09:20", "remaining_time": "14:52:05"}
|
105 |
+
{"current_steps": 105, "total_steps": 594, "loss": 0.8242, "lr": 9.833414897752346e-06, "epoch": 0.5294117647058824, "percentage": 17.68, "elapsed_time": "3:11:00", "remaining_time": "14:49:34"}
|
106 |
+
{"current_steps": 106, "total_steps": 594, "loss": 0.8369, "lr": 9.825801575298248e-06, "epoch": 0.534453781512605, "percentage": 17.85, "elapsed_time": "3:12:41", "remaining_time": "14:47:07"}
|
107 |
+
{"current_steps": 107, "total_steps": 594, "loss": 0.8687, "lr": 9.818021226321502e-06, "epoch": 0.5394957983193277, "percentage": 18.01, "elapsed_time": "3:14:23", "remaining_time": "14:44:44"}
|
108 |
+
{"current_steps": 108, "total_steps": 594, "loss": 0.8698, "lr": 9.8100741201089e-06, "epoch": 0.5445378151260504, "percentage": 18.18, "elapsed_time": "3:15:44", "remaining_time": "14:40:48"}
|
109 |
+
{"current_steps": 109, "total_steps": 594, "loss": 0.9224, "lr": 9.801960531718898e-06, "epoch": 0.5495798319327732, "percentage": 18.35, "elapsed_time": "3:16:59", "remaining_time": "14:36:32"}
|
110 |
+
{"current_steps": 110, "total_steps": 594, "loss": 0.7909, "lr": 9.793680741972084e-06, "epoch": 0.5546218487394958, "percentage": 18.52, "elapsed_time": "3:18:40", "remaining_time": "14:34:08"}
|
111 |
+
{"current_steps": 111, "total_steps": 594, "loss": 0.8222, "lr": 9.785235037441473e-06, "epoch": 0.5596638655462185, "percentage": 18.69, "elapsed_time": "3:20:17", "remaining_time": "14:31:34"}
|
112 |
+
{"current_steps": 112, "total_steps": 594, "loss": 0.9698, "lr": 9.77662371044258e-06, "epoch": 0.5647058823529412, "percentage": 18.86, "elapsed_time": "3:22:03", "remaining_time": "14:29:34"}
|
113 |
+
{"current_steps": 113, "total_steps": 594, "loss": 0.8141, "lr": 9.767847059023292e-06, "epoch": 0.5697478991596638, "percentage": 19.02, "elapsed_time": "3:23:39", "remaining_time": "14:26:52"}
|
114 |
+
{"current_steps": 114, "total_steps": 594, "loss": 0.8281, "lr": 9.75890538695358e-06, "epoch": 0.5747899159663865, "percentage": 19.19, "elapsed_time": "3:25:26", "remaining_time": "14:25:02"}
|
115 |
+
{"current_steps": 115, "total_steps": 594, "loss": 0.8174, "lr": 9.749799003714954e-06, "epoch": 0.5798319327731093, "percentage": 19.36, "elapsed_time": "3:27:26", "remaining_time": "14:24:02"}
|
116 |
+
{"current_steps": 116, "total_steps": 594, "loss": 0.8662, "lr": 9.74052822448978e-06, "epoch": 0.584873949579832, "percentage": 19.53, "elapsed_time": "3:29:23", "remaining_time": "14:22:50"}
|
117 |
+
{"current_steps": 117, "total_steps": 594, "loss": 0.9227, "lr": 9.731093370150349e-06, "epoch": 0.5899159663865546, "percentage": 19.7, "elapsed_time": "3:30:57", "remaining_time": "14:20:04"}
|
118 |
+
{"current_steps": 118, "total_steps": 594, "loss": 0.9292, "lr": 9.721494767247779e-06, "epoch": 0.5949579831932773, "percentage": 19.87, "elapsed_time": "3:32:30", "remaining_time": "14:17:15"}
|
119 |
+
{"current_steps": 119, "total_steps": 594, "loss": 0.808, "lr": 9.71173274800072e-06, "epoch": 0.6, "percentage": 20.03, "elapsed_time": "3:34:25", "remaining_time": "14:15:52"}
|
120 |
+
{"current_steps": 120, "total_steps": 594, "loss": 0.8052, "lr": 9.70180765028384e-06, "epoch": 0.6050420168067226, "percentage": 20.2, "elapsed_time": "3:36:12", "remaining_time": "14:14:01"}
|
121 |
+
{"current_steps": 121, "total_steps": 594, "loss": 0.8321, "lr": 9.691719817616148e-06, "epoch": 0.6100840336134454, "percentage": 20.37, "elapsed_time": "3:37:34", "remaining_time": "14:10:31"}
|
122 |
+
{"current_steps": 122, "total_steps": 594, "loss": 0.8362, "lr": 9.681469599149093e-06, "epoch": 0.6151260504201681, "percentage": 20.54, "elapsed_time": "3:39:13", "remaining_time": "14:08:10"}
|
123 |
+
{"current_steps": 123, "total_steps": 594, "loss": 0.8753, "lr": 9.671057349654481e-06, "epoch": 0.6201680672268908, "percentage": 20.71, "elapsed_time": "3:40:48", "remaining_time": "14:05:33"}
|
124 |
+
{"current_steps": 124, "total_steps": 594, "loss": 0.8406, "lr": 9.660483429512198e-06, "epoch": 0.6252100840336134, "percentage": 20.88, "elapsed_time": "3:42:26", "remaining_time": "14:03:08"}
|
125 |
+
{"current_steps": 125, "total_steps": 594, "loss": 0.8096, "lr": 9.649748204697741e-06, "epoch": 0.6302521008403361, "percentage": 21.04, "elapsed_time": "3:43:51", "remaining_time": "13:59:54"}
|
126 |
+
{"current_steps": 126, "total_steps": 594, "loss": 0.9279, "lr": 9.63885204676954e-06, "epoch": 0.6352941176470588, "percentage": 21.21, "elapsed_time": "3:45:43", "remaining_time": "13:58:26"}
|
127 |
+
{"current_steps": 127, "total_steps": 594, "loss": 0.8668, "lr": 9.627795332856107e-06, "epoch": 0.6403361344537815, "percentage": 21.38, "elapsed_time": "3:47:39", "remaining_time": "13:57:06"}
|
128 |
+
{"current_steps": 128, "total_steps": 594, "loss": 0.8021, "lr": 9.616578445642982e-06, "epoch": 0.6453781512605042, "percentage": 21.55, "elapsed_time": "3:49:38", "remaining_time": "13:56:04"}
|
129 |
+
{"current_steps": 129, "total_steps": 594, "loss": 0.9031, "lr": 9.605201773359485e-06, "epoch": 0.6504201680672269, "percentage": 21.72, "elapsed_time": "3:51:26", "remaining_time": "13:54:15"}
|
130 |
+
{"current_steps": 130, "total_steps": 594, "loss": 0.9028, "lr": 9.59366570976528e-06, "epoch": 0.6554621848739496, "percentage": 21.89, "elapsed_time": "3:53:18", "remaining_time": "13:52:44"}
|
131 |
+
{"current_steps": 131, "total_steps": 594, "loss": 0.8206, "lr": 9.581970654136752e-06, "epoch": 0.6605042016806723, "percentage": 22.05, "elapsed_time": "3:55:28", "remaining_time": "13:52:14"}
|
132 |
+
{"current_steps": 132, "total_steps": 594, "loss": 0.8038, "lr": 9.570117011253173e-06, "epoch": 0.6655462184873949, "percentage": 22.22, "elapsed_time": "3:57:22", "remaining_time": "13:50:49"}
|
133 |
+
{"current_steps": 133, "total_steps": 594, "loss": 0.8594, "lr": 9.55810519138271e-06, "epoch": 0.6705882352941176, "percentage": 22.39, "elapsed_time": "3:59:22", "remaining_time": "13:49:41"}
|
134 |
+
{"current_steps": 134, "total_steps": 594, "loss": 0.8946, "lr": 9.545935610268213e-06, "epoch": 0.6756302521008404, "percentage": 22.56, "elapsed_time": "4:00:53", "remaining_time": "13:46:57"}
|
135 |
+
{"current_steps": 135, "total_steps": 594, "loss": 0.8747, "lr": 9.533608689112827e-06, "epoch": 0.680672268907563, "percentage": 22.73, "elapsed_time": "4:02:46", "remaining_time": "13:45:26"}
|
136 |
+
{"current_steps": 136, "total_steps": 594, "loss": 0.8665, "lr": 9.521124854565425e-06, "epoch": 0.6857142857142857, "percentage": 22.9, "elapsed_time": "4:04:43", "remaining_time": "13:44:08"}
|
137 |
+
{"current_steps": 137, "total_steps": 594, "loss": 0.8172, "lr": 9.508484538705823e-06, "epoch": 0.6907563025210084, "percentage": 23.06, "elapsed_time": "4:06:24", "remaining_time": "13:41:59"}
|
138 |
+
{"current_steps": 138, "total_steps": 594, "loss": 0.8159, "lr": 9.495688179029838e-06, "epoch": 0.6957983193277311, "percentage": 23.23, "elapsed_time": "4:07:48", "remaining_time": "13:38:51"}
|
139 |
+
{"current_steps": 139, "total_steps": 594, "loss": 0.772, "lr": 9.482736218434144e-06, "epoch": 0.7008403361344537, "percentage": 23.4, "elapsed_time": "4:09:56", "remaining_time": "13:38:07"}
|
140 |
+
{"current_steps": 140, "total_steps": 594, "loss": 0.812, "lr": 9.469629105200937e-06, "epoch": 0.7058823529411765, "percentage": 23.57, "elapsed_time": "4:11:39", "remaining_time": "13:36:06"}
|
141 |
+
{"current_steps": 141, "total_steps": 594, "loss": 0.9176, "lr": 9.45636729298243e-06, "epoch": 0.7109243697478992, "percentage": 23.74, "elapsed_time": "4:13:34", "remaining_time": "13:34:40"}
|
142 |
+
{"current_steps": 142, "total_steps": 594, "loss": 0.9227, "lr": 9.442951240785135e-06, "epoch": 0.7159663865546219, "percentage": 23.91, "elapsed_time": "4:15:19", "remaining_time": "13:32:43"}
|
143 |
+
{"current_steps": 143, "total_steps": 594, "loss": 0.8406, "lr": 9.429381412954e-06, "epoch": 0.7210084033613445, "percentage": 24.07, "elapsed_time": "4:17:19", "remaining_time": "13:31:32"}
|
144 |
+
{"current_steps": 144, "total_steps": 594, "loss": 0.7944, "lr": 9.415658279156312e-06, "epoch": 0.7260504201680672, "percentage": 24.24, "elapsed_time": "4:18:58", "remaining_time": "13:29:19"}
|
145 |
+
{"current_steps": 145, "total_steps": 594, "loss": 0.7889, "lr": 9.401782314365458e-06, "epoch": 0.7310924369747899, "percentage": 24.41, "elapsed_time": "4:20:47", "remaining_time": "13:27:31"}
|
146 |
+
{"current_steps": 146, "total_steps": 594, "loss": 0.8542, "lr": 9.387753998844482e-06, "epoch": 0.7361344537815127, "percentage": 24.58, "elapsed_time": "4:22:43", "remaining_time": "13:26:08"}
|
147 |
+
{"current_steps": 147, "total_steps": 594, "loss": 0.8713, "lr": 9.37357381812946e-06, "epoch": 0.7411764705882353, "percentage": 24.75, "elapsed_time": "4:24:48", "remaining_time": "13:25:14"}
|
148 |
+
{"current_steps": 148, "total_steps": 594, "loss": 0.8405, "lr": 9.359242263012693e-06, "epoch": 0.746218487394958, "percentage": 24.92, "elapsed_time": "4:26:17", "remaining_time": "13:22:27"}
|
149 |
+
{"current_steps": 149, "total_steps": 594, "loss": 0.8666, "lr": 9.344759829525734e-06, "epoch": 0.7512605042016807, "percentage": 25.08, "elapsed_time": "4:27:49", "remaining_time": "13:19:52"}
|
150 |
+
{"current_steps": 150, "total_steps": 594, "loss": 0.7429, "lr": 9.330127018922195e-06, "epoch": 0.7563025210084033, "percentage": 25.25, "elapsed_time": "4:29:09", "remaining_time": "13:16:42"}
|
151 |
+
{"current_steps": 151, "total_steps": 594, "loss": 0.8649, "lr": 9.315344337660422e-06, "epoch": 0.761344537815126, "percentage": 25.42, "elapsed_time": "4:30:57", "remaining_time": "13:14:56"}
|
152 |
+
{"current_steps": 152, "total_steps": 594, "loss": 0.8614, "lr": 9.300412297385954e-06, "epoch": 0.7663865546218488, "percentage": 25.59, "elapsed_time": "4:32:44", "remaining_time": "13:13:07"}
|
153 |
+
{"current_steps": 153, "total_steps": 594, "loss": 0.837, "lr": 9.285331414913816e-06, "epoch": 0.7714285714285715, "percentage": 25.76, "elapsed_time": "4:34:55", "remaining_time": "13:12:27"}
|
154 |
+
{"current_steps": 154, "total_steps": 594, "loss": 0.8404, "lr": 9.270102212210632e-06, "epoch": 0.7764705882352941, "percentage": 25.93, "elapsed_time": "4:36:04", "remaining_time": "13:08:48"}
|
155 |
+
{"current_steps": 155, "total_steps": 594, "loss": 0.9221, "lr": 9.254725216376562e-06, "epoch": 0.7815126050420168, "percentage": 26.09, "elapsed_time": "4:37:41", "remaining_time": "13:06:30"}
|
156 |
+
{"current_steps": 156, "total_steps": 594, "loss": 0.8627, "lr": 9.239200959627048e-06, "epoch": 0.7865546218487395, "percentage": 26.26, "elapsed_time": "4:39:10", "remaining_time": "13:03:50"}
|
157 |
+
{"current_steps": 157, "total_steps": 594, "loss": 0.8525, "lr": 9.223529979274411e-06, "epoch": 0.7915966386554621, "percentage": 26.43, "elapsed_time": "4:40:53", "remaining_time": "13:01:50"}
|
158 |
+
{"current_steps": 158, "total_steps": 594, "loss": 0.7901, "lr": 9.207712817709237e-06, "epoch": 0.7966386554621848, "percentage": 26.6, "elapsed_time": "4:42:49", "remaining_time": "13:00:28"}
|
159 |
+
{"current_steps": 159, "total_steps": 594, "loss": 0.865, "lr": 9.191750022381613e-06, "epoch": 0.8016806722689076, "percentage": 26.77, "elapsed_time": "4:44:34", "remaining_time": "12:58:32"}
|
160 |
+
{"current_steps": 160, "total_steps": 594, "loss": 0.7898, "lr": 9.175642145782179e-06, "epoch": 0.8067226890756303, "percentage": 26.94, "elapsed_time": "4:46:13", "remaining_time": "12:56:24"}
|
161 |
+
{"current_steps": 161, "total_steps": 594, "loss": 0.8372, "lr": 9.159389745423003e-06, "epoch": 0.8117647058823529, "percentage": 27.1, "elapsed_time": "4:48:25", "remaining_time": "12:55:41"}
|
162 |
+
{"current_steps": 162, "total_steps": 594, "loss": 0.8383, "lr": 9.142993383818284e-06, "epoch": 0.8168067226890756, "percentage": 27.27, "elapsed_time": "4:50:07", "remaining_time": "12:53:40"}
|
163 |
+
{"current_steps": 163, "total_steps": 594, "loss": 0.8151, "lr": 9.126453628464889e-06, "epoch": 0.8218487394957983, "percentage": 27.44, "elapsed_time": "4:51:35", "remaining_time": "12:51:02"}
|
164 |
+
{"current_steps": 164, "total_steps": 594, "loss": 0.8444, "lr": 9.109771051822702e-06, "epoch": 0.826890756302521, "percentage": 27.61, "elapsed_time": "4:53:09", "remaining_time": "12:48:38"}
|
165 |
+
{"current_steps": 165, "total_steps": 594, "loss": 0.8672, "lr": 9.09294623129482e-06, "epoch": 0.8319327731092437, "percentage": 27.78, "elapsed_time": "4:54:48", "remaining_time": "12:46:31"}
|
166 |
+
{"current_steps": 166, "total_steps": 594, "loss": 0.8168, "lr": 9.07597974920756e-06, "epoch": 0.8369747899159664, "percentage": 27.95, "elapsed_time": "4:56:13", "remaining_time": "12:43:45"}
|
167 |
+
{"current_steps": 167, "total_steps": 594, "loss": 0.9118, "lr": 9.058872192790314e-06, "epoch": 0.8420168067226891, "percentage": 28.11, "elapsed_time": "4:57:58", "remaining_time": "12:41:53"}
|
168 |
+
{"current_steps": 168, "total_steps": 594, "loss": 0.8515, "lr": 9.041624154155208e-06, "epoch": 0.8470588235294118, "percentage": 28.28, "elapsed_time": "4:59:32", "remaining_time": "12:39:32"}
|
169 |
+
{"current_steps": 169, "total_steps": 594, "loss": 0.7417, "lr": 9.02423623027663e-06, "epoch": 0.8521008403361344, "percentage": 28.45, "elapsed_time": "5:01:37", "remaining_time": "12:38:32"}
|
170 |
+
{"current_steps": 170, "total_steps": 594, "loss": 0.8408, "lr": 9.006709022970547e-06, "epoch": 0.8571428571428571, "percentage": 28.62, "elapsed_time": "5:03:09", "remaining_time": "12:36:06"}
|
171 |
+
{"current_steps": 171, "total_steps": 594, "loss": 0.7358, "lr": 8.98904313887369e-06, "epoch": 0.8621848739495799, "percentage": 28.79, "elapsed_time": "5:04:41", "remaining_time": "12:33:42"}
|
172 |
+
{"current_steps": 172, "total_steps": 594, "loss": 0.8322, "lr": 8.971239189422555e-06, "epoch": 0.8672268907563025, "percentage": 28.96, "elapsed_time": "5:06:17", "remaining_time": "12:31:29"}
|
173 |
+
{"current_steps": 173, "total_steps": 594, "loss": 0.8411, "lr": 8.953297790832231e-06, "epoch": 0.8722689075630252, "percentage": 29.12, "elapsed_time": "5:07:39", "remaining_time": "12:28:42"}
|
174 |
+
{"current_steps": 174, "total_steps": 594, "loss": 0.8036, "lr": 8.935219564075087e-06, "epoch": 0.8773109243697479, "percentage": 29.29, "elapsed_time": "5:09:55", "remaining_time": "12:28:05"}
|
175 |
+
{"current_steps": 175, "total_steps": 594, "loss": 0.8035, "lr": 8.917005134859263e-06, "epoch": 0.8823529411764706, "percentage": 29.46, "elapsed_time": "5:11:32", "remaining_time": "12:25:54"}
|
176 |
+
{"current_steps": 176, "total_steps": 594, "loss": 0.8392, "lr": 8.89865513360703e-06, "epoch": 0.8873949579831932, "percentage": 29.63, "elapsed_time": "5:13:20", "remaining_time": "12:24:11"}
|
177 |
+
{"current_steps": 177, "total_steps": 594, "loss": 0.8328, "lr": 8.88017019543296e-06, "epoch": 0.892436974789916, "percentage": 29.8, "elapsed_time": "5:15:25", "remaining_time": "12:23:07"}
|
178 |
+
{"current_steps": 178, "total_steps": 594, "loss": 0.8543, "lr": 8.861550960121946e-06, "epoch": 0.8974789915966387, "percentage": 29.97, "elapsed_time": "5:17:26", "remaining_time": "12:21:53"}
|
179 |
+
{"current_steps": 179, "total_steps": 594, "loss": 0.8512, "lr": 8.842798072107055e-06, "epoch": 0.9025210084033614, "percentage": 30.13, "elapsed_time": "5:19:32", "remaining_time": "12:20:50"}
|
180 |
+
{"current_steps": 180, "total_steps": 594, "loss": 0.8598, "lr": 8.823912180447237e-06, "epoch": 0.907563025210084, "percentage": 30.3, "elapsed_time": "5:21:13", "remaining_time": "12:18:49"}
|
181 |
+
{"current_steps": 181, "total_steps": 594, "loss": 0.8613, "lr": 8.804893938804839e-06, "epoch": 0.9126050420168067, "percentage": 30.47, "elapsed_time": "5:22:42", "remaining_time": "12:16:20"}
|
182 |
+
{"current_steps": 182, "total_steps": 594, "loss": 0.8192, "lr": 8.785744005423003e-06, "epoch": 0.9176470588235294, "percentage": 30.64, "elapsed_time": "5:24:15", "remaining_time": "12:14:02"}
|
183 |
+
{"current_steps": 183, "total_steps": 594, "loss": 0.8114, "lr": 8.766463043102864e-06, "epoch": 0.9226890756302522, "percentage": 30.81, "elapsed_time": "5:26:18", "remaining_time": "12:12:51"}
|
184 |
+
{"current_steps": 184, "total_steps": 594, "loss": 0.8922, "lr": 8.747051719180626e-06, "epoch": 0.9277310924369748, "percentage": 30.98, "elapsed_time": "5:28:23", "remaining_time": "12:11:44"}
|
185 |
+
{"current_steps": 185, "total_steps": 594, "loss": 0.8932, "lr": 8.727510705504453e-06, "epoch": 0.9327731092436975, "percentage": 31.14, "elapsed_time": "5:29:56", "remaining_time": "12:09:27"}
|
186 |
+
{"current_steps": 186, "total_steps": 594, "loss": 0.7998, "lr": 8.707840678411223e-06, "epoch": 0.9378151260504202, "percentage": 31.31, "elapsed_time": "5:31:41", "remaining_time": "12:07:35"}
|
187 |
+
{"current_steps": 187, "total_steps": 594, "loss": 0.7416, "lr": 8.688042318703111e-06, "epoch": 0.9428571428571428, "percentage": 31.48, "elapsed_time": "5:33:51", "remaining_time": "12:06:38"}
|
188 |
+
{"current_steps": 188, "total_steps": 594, "loss": 0.8685, "lr": 8.66811631162404e-06, "epoch": 0.9478991596638655, "percentage": 31.65, "elapsed_time": "5:35:46", "remaining_time": "12:05:06"}
|
189 |
+
{"current_steps": 189, "total_steps": 594, "loss": 0.8485, "lr": 8.648063346835943e-06, "epoch": 0.9529411764705882, "percentage": 31.82, "elapsed_time": "5:37:17", "remaining_time": "12:02:45"}
|
190 |
+
{"current_steps": 190, "total_steps": 594, "loss": 0.8286, "lr": 8.627884118394913e-06, "epoch": 0.957983193277311, "percentage": 31.99, "elapsed_time": "5:39:19", "remaining_time": "12:01:30"}
|
191 |
+
{"current_steps": 191, "total_steps": 594, "loss": 0.8544, "lr": 8.607579324727175e-06, "epoch": 0.9630252100840336, "percentage": 32.15, "elapsed_time": "5:40:50", "remaining_time": "11:59:08"}
|
192 |
+
{"current_steps": 192, "total_steps": 594, "loss": 0.8102, "lr": 8.5871496686049e-06, "epoch": 0.9680672268907563, "percentage": 32.32, "elapsed_time": "5:42:12", "remaining_time": "11:56:30"}
|
193 |
+
{"current_steps": 193, "total_steps": 594, "loss": 0.8122, "lr": 8.566595857121902e-06, "epoch": 0.973109243697479, "percentage": 32.49, "elapsed_time": "5:43:30", "remaining_time": "11:53:43"}
|
194 |
+
{"current_steps": 194, "total_steps": 594, "loss": 0.8834, "lr": 8.545918601669147e-06, "epoch": 0.9781512605042016, "percentage": 32.66, "elapsed_time": "5:45:11", "remaining_time": "11:51:44"}
|
195 |
+
{"current_steps": 195, "total_steps": 594, "loss": 0.8148, "lr": 8.525118617910144e-06, "epoch": 0.9831932773109243, "percentage": 32.83, "elapsed_time": "5:46:28", "remaining_time": "11:48:56"}
|
196 |
+
{"current_steps": 196, "total_steps": 594, "loss": 0.8271, "lr": 8.504196625756166e-06, "epoch": 0.9882352941176471, "percentage": 33.0, "elapsed_time": "5:48:19", "remaining_time": "11:47:19"}
|
197 |
+
{"current_steps": 197, "total_steps": 594, "loss": 0.845, "lr": 8.483153349341336e-06, "epoch": 0.9932773109243698, "percentage": 33.16, "elapsed_time": "5:50:46", "remaining_time": "11:46:52"}
|
198 |
+
{"current_steps": 198, "total_steps": 594, "loss": 0.8312, "lr": 8.461989516997565e-06, "epoch": 0.9983193277310924, "percentage": 33.33, "elapsed_time": "5:52:46", "remaining_time": "11:45:33"}
|
199 |
+
{"current_steps": 199, "total_steps": 594, "loss": 1.4381, "lr": 8.440705861229344e-06, "epoch": 1.0050420168067227, "percentage": 33.5, "elapsed_time": "5:54:59", "remaining_time": "11:44:38"}
|
200 |
+
{"current_steps": 200, "total_steps": 594, "loss": 0.713, "lr": 8.41930311868839e-06, "epoch": 1.0100840336134453, "percentage": 33.67, "elapsed_time": "5:56:31", "remaining_time": "11:42:21"}
|
201 |
+
{"current_steps": 201, "total_steps": 594, "loss": 0.716, "lr": 8.397782030148147e-06, "epoch": 1.015126050420168, "percentage": 33.84, "elapsed_time": "5:59:46", "remaining_time": "11:43:26"}
|
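Each record in the trainer log above is one line of newline-delimited JSON with the fields current_steps, total_steps, loss, lr, epoch, percentage, elapsed_time, and remaining_time. A minimal sketch for reading it back and eyeballing the loss curve, assuming the file is the repository's trainer_log.jsonl and that every record carries a loss value:

import json

# Parse the newline-delimited JSON training log; one record per logged step.
records = []
with open("trainer_log.jsonl", encoding="utf-8") as f:
    for line in f:
        line = line.strip()
        if line:
            records.append(json.loads(line))

# Quick sanity check: step, learning rate, and training loss per record.
for r in records:
    print(f"step {r['current_steps']:>3}/{r['total_steps']}  "
          f"lr={r['lr']:.3e}  loss={r['loss']:.4f}  epoch={r['epoch']:.3f}")

The same records feed directly into pandas or matplotlib if you would rather chart loss against current_steps than scan the raw lines.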
training_args.bin
ADDED
@@ -0,0 +1,3 @@
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:63fc5cd8c7c0beb250e775fa0143a9d79f64ab7140a9063b75aa45bee0ba675a
|
3 |
+
size 8273
|
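The training_args.bin above is only a Git LFS pointer; the actual payload (about 8 KB) is the pickled arguments object that transformers' Trainer typically writes with torch.save. A hedged sketch for inspecting it, assuming a recent PyTorch and a compatible transformers version so the pickled class can be resolved:

import torch

# training_args.bin is a pickled (Seq2Seq)TrainingArguments object, so it must be
# fully unpickled; only load files from sources you trust.
args = torch.load("training_args.bin", weights_only=False)

print(type(args).__name__)
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)

The attribute names above are standard TrainingArguments fields; the exact set available depends on the transformers version used for training.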
vocab.json
ADDED
The diff for this file is too large to render.
See raw diff
|