Training in progress, step 200
- .gitattributes +1 -0
- added_tokens.json +24 -0
- config.json +28 -0
- merges.txt +0 -0
- model-00001-of-00004.safetensors +3 -0
- model-00002-of-00004.safetensors +3 -0
- model-00003-of-00004.safetensors +3 -0
- model-00004-of-00004.safetensors +3 -0
- model.safetensors.index.json +346 -0
- special_tokens_map.json +31 -0
- tokenizer.json +3 -0
- tokenizer_config.json +209 -0
- trainer_log.jsonl +201 -0
- training_args.bin +3 -0
- vocab.json +0 -0
.gitattributes
CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
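The added rule routes tokenizer.json through Git LFS alongside the existing globs. As a rough sketch of what such gitattributes patterns do (the pattern list below is only the subset shown above, and real gitattributes matching is richer than fnmatch):

from fnmatch import fnmatch

# Patterns copied from the .gitattributes rules above (a subset of the full file).
LFS_PATTERNS = ["*.zip", "*.zst", "*tfevents*", "tokenizer.json"]

def is_lfs_tracked(filename: str) -> bool:
    # Real gitattributes matching handles paths, **, and negation; plain
    # fnmatch against the bare file name is enough for these simple globs.
    return any(fnmatch(filename, pattern) for pattern in LFS_PATTERNS)

for name in ["tokenizer.json", "vocab.json", "events.out.tfevents.12345"]:
    print(name, "->", is_lfs_tracked(name))
# tokenizer.json -> True, vocab.json -> False, events.out.tfevents.12345 -> True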
added_tokens.json
ADDED
@@ -0,0 +1,24 @@
+{
+  "</tool_call>": 151658,
+  "<tool_call>": 151657,
+  "<|box_end|>": 151649,
+  "<|box_start|>": 151648,
+  "<|endoftext|>": 151643,
+  "<|file_sep|>": 151664,
+  "<|fim_middle|>": 151660,
+  "<|fim_pad|>": 151662,
+  "<|fim_prefix|>": 151659,
+  "<|fim_suffix|>": 151661,
+  "<|im_end|>": 151645,
+  "<|im_start|>": 151644,
+  "<|image_pad|>": 151655,
+  "<|object_ref_end|>": 151647,
+  "<|object_ref_start|>": 151646,
+  "<|quad_end|>": 151651,
+  "<|quad_start|>": 151650,
+  "<|repo_name|>": 151663,
+  "<|video_pad|>": 151656,
+  "<|vision_end|>": 151653,
+  "<|vision_pad|>": 151654,
+  "<|vision_start|>": 151652
+}
config.json
ADDED
@@ -0,0 +1,28 @@
+{
+  "architectures": [
+    "Qwen2ForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "eos_token_id": 151645,
+  "hidden_act": "silu",
+  "hidden_size": 3584,
+  "initializer_range": 0.02,
+  "intermediate_size": 18944,
+  "max_position_embeddings": 32768,
+  "max_window_layers": 28,
+  "model_type": "qwen2",
+  "num_attention_heads": 28,
+  "num_hidden_layers": 28,
+  "num_key_value_heads": 4,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": null,
+  "rope_theta": 1000000.0,
+  "sliding_window": 131072,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.51.2",
+  "use_cache": false,
+  "use_sliding_window": false,
+  "vocab_size": 152064
+}
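This is a standard Qwen2 7B-scale config (28 layers, hidden size 3584, GQA with 4 KV heads). A minimal sketch of reading it back with transformers, assuming the checkpoint directory is available locally at a hypothetical path ./checkpoint-200:

from transformers import AutoConfig, AutoModelForCausalLM

# "./checkpoint-200" is a hypothetical local path to this checkpoint;
# substitute the actual repo id or download location.
config = AutoConfig.from_pretrained("./checkpoint-200")
print(config.model_type, config.hidden_size, config.num_hidden_layers)  # qwen2 3584 28

# The same directory (config + sharded safetensors + index) loads in one call:
model = AutoModelForCausalLM.from_pretrained("./checkpoint-200", torch_dtype="bfloat16")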
merges.txt
ADDED
The diff for this file is too large to render.
model-00001-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1533bf0bdb713130e450b940d8a89488d1d629e39bc72a1d7a2a5d5bd5c29a2b
+size 4877660776
model-00002-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:852dd860a1d985235fd6ed22b402b79d12af2945c1e9d98fc88f8e577770a4be
+size 4932751008
model-00003-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b8283278b6f3a9ab7e2773057e4480480033a89dd1d5246c4b579bbc7d7ae7a0
+size 4330865200
model-00004-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:92cd301e4aabc6d6293544ec24b694a863a73361d581f2afa7390037e5883312
+size 1089994880
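Each of the four shard files above is committed as a Git LFS pointer: three text lines (spec version, sha256 oid, byte size) standing in for the binary. A small sketch that parses one such pointer:

def parse_lfs_pointer(text: str) -> dict:
    """Split the three "key value" lines of a Git LFS pointer file."""
    return dict(line.split(" ", 1) for line in text.strip().splitlines())

pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:1533bf0bdb713130e450b940d8a89488d1d629e39bc72a1d7a2a5d5bd5c29a2b\n"
    "size 4877660776"
)
info = parse_lfs_pointer(pointer)
print(info["oid"], int(info["size"]))  # sha256:1533bf... 4877660776 (~4.9 GB)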
model.safetensors.index.json
ADDED
@@ -0,0 +1,346 @@
+{
+  "metadata": {
+    "total_size": 15231233024
+  },
+  "weight_map": {
+    "lm_head.weight": "model-00004-of-00004.safetensors",
+    "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.norm.weight": "model-00003-of-00004.safetensors"
+  }
+}
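The index maps every tensor name to the shard that holds it (total_size 15231233024 bytes, about 15.2 GB in bfloat16). A minimal sketch of assembling a state dict from the index, roughly what transformers does internally when it sees model.safetensors.index.json; the ./checkpoint-200 path is hypothetical:

import json
from safetensors.torch import load_file

ckpt_dir = "./checkpoint-200"  # hypothetical local checkpoint directory

with open(f"{ckpt_dir}/model.safetensors.index.json") as f:
    index = json.load(f)

# Load each shard once, collecting every tensor into a single state dict.
state_dict = {}
for shard in sorted(set(index["weight_map"].values())):
    state_dict.update(load_file(f"{ckpt_dir}/{shard}"))

# Sanity check: the tensors' byte sizes should sum to the recorded total_size.
nbytes = sum(t.numel() * t.element_size() for t in state_dict.values())
print(nbytes == index["metadata"]["total_size"])  # expected: True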
special_tokens_map.json
ADDED
@@ -0,0 +1,31 @@
+{
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "eos_token": {
+    "content": "<|im_end|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
tokenizer.json
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
+size 11421896
tokenizer_config.json
ADDED
@@ -0,0 +1,209 @@
+{
+  "add_bos_token": false,
+  "add_prefix_space": false,
+  "added_tokens_decoder": {
+    "151643": {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151644": {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151645": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151646": {
+      "content": "<|object_ref_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151647": {
+      "content": "<|object_ref_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151648": {
+      "content": "<|box_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151649": {
+      "content": "<|box_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151650": {
+      "content": "<|quad_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151651": {
+      "content": "<|quad_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151652": {
+      "content": "<|vision_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151653": {
+      "content": "<|vision_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151654": {
+      "content": "<|vision_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151655": {
+      "content": "<|image_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151656": {
+      "content": "<|video_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151657": {
+      "content": "<tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151658": {
+      "content": "</tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151659": {
+      "content": "<|fim_prefix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151660": {
+      "content": "<|fim_middle|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151661": {
+      "content": "<|fim_suffix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151662": {
+      "content": "<|fim_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151663": {
+      "content": "<|repo_name|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151664": {
+      "content": "<|file_sep|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    }
+  },
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "bos_token": null,
+  "chat_template": "{%- if tools %}\n {{- '<|im_start|>system\\n' }}\n {%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n {%- else %}\n {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}\n {%- endif %}\n {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n {%- if messages[0]['role'] == 'system' %}\n {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n {%- else %}\n {{- '<|im_start|>system\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\n' }}\n {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role }}\n {%- if message.content %}\n {{- '\\n' + message.content }}\n {%- endif %}\n {%- for tool_call in message.tool_calls %}\n {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '\\n<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n {{- '<|im_start|>user' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- message.content }}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>\\n' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "<|im_end|>",
+  "errors": "replace",
+  "extra_special_tokens": {},
+  "model_max_length": 32768,
+  "pad_token": "<|endoftext|>",
+  "padding_side": "right",
+  "split_special_tokens": false,
+  "tokenizer_class": "Qwen2Tokenizer",
+  "unk_token": null
+}
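The chat_template field above is a Qwen-style ChatML template with tool-calling support. A quick sketch of rendering it through the tokenizer (the local path is hypothetical):

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("./checkpoint-200")  # hypothetical path

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]
text = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(text)
# <|im_start|>system
# You are a helpful assistant.<|im_end|>
# <|im_start|>user
# Hello!<|im_end|>
# <|im_start|>assistant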
trainer_log.jsonl
ADDED
@@ -0,0 +1,201 @@
+{"current_steps": 1, "total_steps": 279, "loss": 0.8565, "lr": 0.0, "epoch": 0.010752688172043012, "percentage": 0.36, "elapsed_time": "0:02:03", "remaining_time": "9:30:19"}
+{"current_steps": 2, "total_steps": 279, "loss": 0.7654, "lr": 3.5714285714285716e-07, "epoch": 0.021505376344086023, "percentage": 0.72, "elapsed_time": "0:04:05", "remaining_time": "9:27:31"}
+{"current_steps": 3, "total_steps": 279, "loss": 0.8602, "lr": 7.142857142857143e-07, "epoch": 0.03225806451612903, "percentage": 1.08, "elapsed_time": "0:06:27", "remaining_time": "9:54:28"}
+{"current_steps": 4, "total_steps": 279, "loss": 0.8179, "lr": 1.0714285714285714e-06, "epoch": 0.043010752688172046, "percentage": 1.43, "elapsed_time": "0:08:32", "remaining_time": "9:47:25"}
+{"current_steps": 5, "total_steps": 279, "loss": 0.7984, "lr": 1.4285714285714286e-06, "epoch": 0.053763440860215055, "percentage": 1.79, "elapsed_time": "0:10:26", "remaining_time": "9:32:33"}
+{"current_steps": 6, "total_steps": 279, "loss": 0.8393, "lr": 1.7857142857142859e-06, "epoch": 0.06451612903225806, "percentage": 2.15, "elapsed_time": "0:12:17", "remaining_time": "9:18:56"}
+{"current_steps": 7, "total_steps": 279, "loss": 0.7408, "lr": 2.1428571428571427e-06, "epoch": 0.07526881720430108, "percentage": 2.51, "elapsed_time": "0:14:34", "remaining_time": "9:26:24"}
+{"current_steps": 8, "total_steps": 279, "loss": 0.7991, "lr": 2.5e-06, "epoch": 0.08602150537634409, "percentage": 2.87, "elapsed_time": "0:16:26", "remaining_time": "9:16:41"}
+{"current_steps": 9, "total_steps": 279, "loss": 0.7439, "lr": 2.8571428571428573e-06, "epoch": 0.0967741935483871, "percentage": 3.23, "elapsed_time": "0:18:09", "remaining_time": "9:04:33"}
+{"current_steps": 10, "total_steps": 279, "loss": 0.7656, "lr": 3.2142857142857147e-06, "epoch": 0.10752688172043011, "percentage": 3.58, "elapsed_time": "0:20:24", "remaining_time": "9:08:52"}
+{"current_steps": 11, "total_steps": 279, "loss": 0.6424, "lr": 3.5714285714285718e-06, "epoch": 0.11827956989247312, "percentage": 3.94, "elapsed_time": "0:22:42", "remaining_time": "9:13:25"}
+{"current_steps": 12, "total_steps": 279, "loss": 0.6746, "lr": 3.928571428571429e-06, "epoch": 0.12903225806451613, "percentage": 4.3, "elapsed_time": "0:24:53", "remaining_time": "9:14:01"}
+{"current_steps": 13, "total_steps": 279, "loss": 0.6786, "lr": 4.2857142857142855e-06, "epoch": 0.13978494623655913, "percentage": 4.66, "elapsed_time": "0:26:47", "remaining_time": "9:08:19"}
+{"current_steps": 14, "total_steps": 279, "loss": 0.6464, "lr": 4.642857142857144e-06, "epoch": 0.15053763440860216, "percentage": 5.02, "elapsed_time": "0:29:10", "remaining_time": "9:12:19"}
+{"current_steps": 15, "total_steps": 279, "loss": 0.5924, "lr": 5e-06, "epoch": 0.16129032258064516, "percentage": 5.38, "elapsed_time": "0:31:09", "remaining_time": "9:08:19"}
+{"current_steps": 16, "total_steps": 279, "loss": 0.6771, "lr": 5.357142857142857e-06, "epoch": 0.17204301075268819, "percentage": 5.73, "elapsed_time": "0:33:19", "remaining_time": "9:07:53"}
+{"current_steps": 17, "total_steps": 279, "loss": 0.6609, "lr": 5.7142857142857145e-06, "epoch": 0.1827956989247312, "percentage": 6.09, "elapsed_time": "0:35:24", "remaining_time": "9:05:45"}
+{"current_steps": 18, "total_steps": 279, "loss": 0.665, "lr": 6.071428571428571e-06, "epoch": 0.1935483870967742, "percentage": 6.45, "elapsed_time": "0:37:15", "remaining_time": "9:00:07"}
+{"current_steps": 19, "total_steps": 279, "loss": 0.5946, "lr": 6.4285714285714295e-06, "epoch": 0.20430107526881722, "percentage": 6.81, "elapsed_time": "0:39:41", "remaining_time": "9:03:13"}
+{"current_steps": 20, "total_steps": 279, "loss": 0.6027, "lr": 6.785714285714287e-06, "epoch": 0.21505376344086022, "percentage": 7.17, "elapsed_time": "0:41:32", "remaining_time": "8:58:01"}
+{"current_steps": 21, "total_steps": 279, "loss": 0.6523, "lr": 7.1428571428571436e-06, "epoch": 0.22580645161290322, "percentage": 7.53, "elapsed_time": "0:43:56", "remaining_time": "8:59:45"}
+{"current_steps": 22, "total_steps": 279, "loss": 0.5775, "lr": 7.500000000000001e-06, "epoch": 0.23655913978494625, "percentage": 7.89, "elapsed_time": "0:45:42", "remaining_time": "8:53:53"}
+{"current_steps": 23, "total_steps": 279, "loss": 0.5343, "lr": 7.857142857142858e-06, "epoch": 0.24731182795698925, "percentage": 8.24, "elapsed_time": "0:47:47", "remaining_time": "8:51:51"}
+{"current_steps": 24, "total_steps": 279, "loss": 0.6303, "lr": 8.214285714285714e-06, "epoch": 0.25806451612903225, "percentage": 8.6, "elapsed_time": "0:50:06", "remaining_time": "8:52:27"}
+{"current_steps": 25, "total_steps": 279, "loss": 0.5673, "lr": 8.571428571428571e-06, "epoch": 0.26881720430107525, "percentage": 8.96, "elapsed_time": "0:51:54", "remaining_time": "8:47:27"}
+{"current_steps": 26, "total_steps": 279, "loss": 0.5834, "lr": 8.92857142857143e-06, "epoch": 0.27956989247311825, "percentage": 9.32, "elapsed_time": "0:53:43", "remaining_time": "8:42:51"}
+{"current_steps": 27, "total_steps": 279, "loss": 0.5861, "lr": 9.285714285714288e-06, "epoch": 0.2903225806451613, "percentage": 9.68, "elapsed_time": "0:55:39", "remaining_time": "8:39:29"}
+{"current_steps": 28, "total_steps": 279, "loss": 0.58, "lr": 9.642857142857144e-06, "epoch": 0.3010752688172043, "percentage": 10.04, "elapsed_time": "0:58:21", "remaining_time": "8:43:12"}
+{"current_steps": 29, "total_steps": 279, "loss": 0.5669, "lr": 1e-05, "epoch": 0.3118279569892473, "percentage": 10.39, "elapsed_time": "1:01:07", "remaining_time": "8:46:57"}
+{"current_steps": 30, "total_steps": 279, "loss": 0.551, "lr": 9.999608360361114e-06, "epoch": 0.3225806451612903, "percentage": 10.75, "elapsed_time": "1:03:08", "remaining_time": "8:44:03"}
+{"current_steps": 31, "total_steps": 279, "loss": 0.6109, "lr": 9.998433502797097e-06, "epoch": 0.3333333333333333, "percentage": 11.11, "elapsed_time": "1:05:10", "remaining_time": "8:41:26"}
+{"current_steps": 32, "total_steps": 279, "loss": 0.5155, "lr": 9.996475611356265e-06, "epoch": 0.34408602150537637, "percentage": 11.47, "elapsed_time": "1:06:58", "remaining_time": "8:36:58"}
+{"current_steps": 33, "total_steps": 279, "loss": 0.5167, "lr": 9.993734992753777e-06, "epoch": 0.3548387096774194, "percentage": 11.83, "elapsed_time": "1:09:14", "remaining_time": "8:36:06"}
+{"current_steps": 34, "total_steps": 279, "loss": 0.626, "lr": 9.990212076323587e-06, "epoch": 0.3655913978494624, "percentage": 12.19, "elapsed_time": "1:11:18", "remaining_time": "8:33:48"}
+{"current_steps": 35, "total_steps": 279, "loss": 0.5301, "lr": 9.98590741395118e-06, "epoch": 0.3763440860215054, "percentage": 12.54, "elapsed_time": "1:13:32", "remaining_time": "8:32:41"}
+{"current_steps": 36, "total_steps": 279, "loss": 0.5705, "lr": 9.980821679987125e-06, "epoch": 0.3870967741935484, "percentage": 12.9, "elapsed_time": "1:15:40", "remaining_time": "8:30:48"}
+{"current_steps": 37, "total_steps": 279, "loss": 0.5742, "lr": 9.974955671141425e-06, "epoch": 0.3978494623655914, "percentage": 13.26, "elapsed_time": "1:17:50", "remaining_time": "8:29:07"}
+{"current_steps": 38, "total_steps": 279, "loss": 0.5443, "lr": 9.968310306358715e-06, "epoch": 0.40860215053763443, "percentage": 13.62, "elapsed_time": "1:19:42", "remaining_time": "8:25:33"}
+{"current_steps": 39, "total_steps": 279, "loss": 0.6328, "lr": 9.960886626674302e-06, "epoch": 0.41935483870967744, "percentage": 13.98, "elapsed_time": "1:21:58", "remaining_time": "8:24:24"}
+{"current_steps": 40, "total_steps": 279, "loss": 0.531, "lr": 9.952685795051078e-06, "epoch": 0.43010752688172044, "percentage": 14.34, "elapsed_time": "1:23:33", "remaining_time": "8:19:18"}
+{"current_steps": 41, "total_steps": 279, "loss": 0.5771, "lr": 9.943709096197334e-06, "epoch": 0.44086021505376344, "percentage": 14.7, "elapsed_time": "1:25:16", "remaining_time": "8:15:03"}
+{"current_steps": 42, "total_steps": 279, "loss": 0.6155, "lr": 9.933957936365515e-06, "epoch": 0.45161290322580644, "percentage": 15.05, "elapsed_time": "1:27:09", "remaining_time": "8:11:50"}
+{"current_steps": 43, "total_steps": 279, "loss": 0.5614, "lr": 9.9234338431319e-06, "epoch": 0.46236559139784944, "percentage": 15.41, "elapsed_time": "1:29:15", "remaining_time": "8:09:53"}
+{"current_steps": 44, "total_steps": 279, "loss": 0.5821, "lr": 9.912138465157325e-06, "epoch": 0.4731182795698925, "percentage": 15.77, "elapsed_time": "1:31:12", "remaining_time": "8:07:05"}
+{"current_steps": 45, "total_steps": 279, "loss": 0.5793, "lr": 9.900073571928887e-06, "epoch": 0.4838709677419355, "percentage": 16.13, "elapsed_time": "1:33:04", "remaining_time": "8:03:58"}
+{"current_steps": 46, "total_steps": 279, "loss": 0.5629, "lr": 9.887241053482756e-06, "epoch": 0.4946236559139785, "percentage": 16.49, "elapsed_time": "1:35:04", "remaining_time": "8:01:34"}
+{"current_steps": 47, "total_steps": 279, "loss": 0.5235, "lr": 9.87364292010809e-06, "epoch": 0.5053763440860215, "percentage": 16.85, "elapsed_time": "1:36:24", "remaining_time": "7:55:53"}
+{"current_steps": 48, "total_steps": 279, "loss": 0.5151, "lr": 9.859281302032107e-06, "epoch": 0.5161290322580645, "percentage": 17.2, "elapsed_time": "1:38:05", "remaining_time": "7:52:03"}
+{"current_steps": 49, "total_steps": 279, "loss": 0.5452, "lr": 9.844158449086372e-06, "epoch": 0.5268817204301075, "percentage": 17.56, "elapsed_time": "1:40:08", "remaining_time": "7:50:03"}
+{"current_steps": 50, "total_steps": 279, "loss": 0.5463, "lr": 9.828276730354353e-06, "epoch": 0.5376344086021505, "percentage": 17.92, "elapsed_time": "1:42:11", "remaining_time": "7:48:01"}
+{"current_steps": 51, "total_steps": 279, "loss": 0.524, "lr": 9.811638633800287e-06, "epoch": 0.5483870967741935, "percentage": 18.28, "elapsed_time": "1:44:25", "remaining_time": "7:46:51"}
+{"current_steps": 52, "total_steps": 279, "loss": 0.5134, "lr": 9.794246765879421e-06, "epoch": 0.5591397849462365, "percentage": 18.64, "elapsed_time": "1:46:15", "remaining_time": "7:43:53"}
+{"current_steps": 53, "total_steps": 279, "loss": 0.5074, "lr": 9.776103851129706e-06, "epoch": 0.5698924731182796, "percentage": 19.0, "elapsed_time": "1:48:26", "remaining_time": "7:42:23"}
+{"current_steps": 54, "total_steps": 279, "loss": 0.4936, "lr": 9.757212731744973e-06, "epoch": 0.5806451612903226, "percentage": 19.35, "elapsed_time": "1:50:24", "remaining_time": "7:40:01"}
+{"current_steps": 55, "total_steps": 279, "loss": 0.5291, "lr": 9.737576367129694e-06, "epoch": 0.5913978494623656, "percentage": 19.71, "elapsed_time": "1:52:54", "remaining_time": "7:39:49"}
+{"current_steps": 56, "total_steps": 279, "loss": 0.5401, "lr": 9.717197833435367e-06, "epoch": 0.6021505376344086, "percentage": 20.07, "elapsed_time": "1:54:46", "remaining_time": "7:37:02"}
+{"current_steps": 57, "total_steps": 279, "loss": 0.5341, "lr": 9.696080323078621e-06, "epoch": 0.6129032258064516, "percentage": 20.43, "elapsed_time": "1:56:56", "remaining_time": "7:35:27"}
+{"current_steps": 58, "total_steps": 279, "loss": 0.4811, "lr": 9.67422714424111e-06, "epoch": 0.6236559139784946, "percentage": 20.79, "elapsed_time": "1:58:52", "remaining_time": "7:32:56"}
+{"current_steps": 59, "total_steps": 279, "loss": 0.5126, "lr": 9.651641720351262e-06, "epoch": 0.6344086021505376, "percentage": 21.15, "elapsed_time": "2:00:56", "remaining_time": "7:30:58"}
+{"current_steps": 60, "total_steps": 279, "loss": 0.5147, "lr": 9.628327589547977e-06, "epoch": 0.6451612903225806, "percentage": 21.51, "elapsed_time": "2:02:52", "remaining_time": "7:28:31"}
+{"current_steps": 61, "total_steps": 279, "loss": 0.533, "lr": 9.604288404126362e-06, "epoch": 0.6559139784946236, "percentage": 21.86, "elapsed_time": "2:04:33", "remaining_time": "7:25:08"}
+{"current_steps": 62, "total_steps": 279, "loss": 0.5972, "lr": 9.579527929965581e-06, "epoch": 0.6666666666666666, "percentage": 22.22, "elapsed_time": "2:07:08", "remaining_time": "7:24:58"}
+{"current_steps": 63, "total_steps": 279, "loss": 0.5211, "lr": 9.554050045938893e-06, "epoch": 0.6774193548387096, "percentage": 22.58, "elapsed_time": "2:10:03", "remaining_time": "7:25:53"}
+{"current_steps": 64, "total_steps": 279, "loss": 0.5664, "lr": 9.52785874330602e-06, "epoch": 0.6881720430107527, "percentage": 22.94, "elapsed_time": "2:12:04", "remaining_time": "7:23:40"}
+{"current_steps": 65, "total_steps": 279, "loss": 0.52, "lr": 9.500958125087882e-06, "epoch": 0.6989247311827957, "percentage": 23.3, "elapsed_time": "2:14:04", "remaining_time": "7:21:24"}
+{"current_steps": 66, "total_steps": 279, "loss": 0.5192, "lr": 9.473352405423845e-06, "epoch": 0.7096774193548387, "percentage": 23.66, "elapsed_time": "2:16:21", "remaining_time": "7:20:03"}
+{"current_steps": 67, "total_steps": 279, "loss": 0.5093, "lr": 9.445045908911536e-06, "epoch": 0.7204301075268817, "percentage": 24.01, "elapsed_time": "2:18:29", "remaining_time": "7:18:13"}
+{"current_steps": 68, "total_steps": 279, "loss": 0.4891, "lr": 9.416043069929389e-06, "epoch": 0.7311827956989247, "percentage": 24.37, "elapsed_time": "2:20:58", "remaining_time": "7:17:27"}
+{"current_steps": 69, "total_steps": 279, "loss": 0.4979, "lr": 9.386348431941953e-06, "epoch": 0.7419354838709677, "percentage": 24.73, "elapsed_time": "2:23:10", "remaining_time": "7:15:44"}
+{"current_steps": 70, "total_steps": 279, "loss": 0.4716, "lr": 9.355966646788152e-06, "epoch": 0.7526881720430108, "percentage": 25.09, "elapsed_time": "2:25:31", "remaining_time": "7:14:29"}
+{"current_steps": 71, "total_steps": 279, "loss": 0.4697, "lr": 9.324902473952529e-06, "epoch": 0.7634408602150538, "percentage": 25.45, "elapsed_time": "2:27:22", "remaining_time": "7:11:45"}
+{"current_steps": 72, "total_steps": 279, "loss": 0.5306, "lr": 9.293160779819658e-06, "epoch": 0.7741935483870968, "percentage": 25.81, "elapsed_time": "2:29:03", "remaining_time": "7:08:31"}
+{"current_steps": 73, "total_steps": 279, "loss": 0.5267, "lr": 9.260746536911792e-06, "epoch": 0.7849462365591398, "percentage": 26.16, "elapsed_time": "2:30:40", "remaining_time": "7:05:12"}
|
74 |
+
{"current_steps": 74, "total_steps": 279, "loss": 0.5243, "lr": 9.227664823109884e-06, "epoch": 0.7956989247311828, "percentage": 26.52, "elapsed_time": "2:32:25", "remaining_time": "7:02:14"}
|
75 |
+
{"current_steps": 75, "total_steps": 279, "loss": 0.4986, "lr": 9.193920820858113e-06, "epoch": 0.8064516129032258, "percentage": 26.88, "elapsed_time": "2:34:42", "remaining_time": "7:00:47"}
|
76 |
+
{"current_steps": 76, "total_steps": 279, "loss": 0.5732, "lr": 9.159519816352021e-06, "epoch": 0.8172043010752689, "percentage": 27.24, "elapsed_time": "2:36:42", "remaining_time": "6:58:33"}
|
77 |
+
{"current_steps": 77, "total_steps": 279, "loss": 0.5374, "lr": 9.124467198710401e-06, "epoch": 0.8279569892473119, "percentage": 27.6, "elapsed_time": "2:38:16", "remaining_time": "6:55:12"}
|
78 |
+
{"current_steps": 78, "total_steps": 279, "loss": 0.5251, "lr": 9.08876845913106e-06, "epoch": 0.8387096774193549, "percentage": 27.96, "elapsed_time": "2:40:19", "remaining_time": "6:53:09"}
|
79 |
+
{"current_steps": 79, "total_steps": 279, "loss": 0.5325, "lr": 9.052429190030589e-06, "epoch": 0.8494623655913979, "percentage": 28.32, "elapsed_time": "2:42:12", "remaining_time": "6:50:38"}
|
80 |
+
{"current_steps": 80, "total_steps": 279, "loss": 0.5008, "lr": 9.015455084168279e-06, "epoch": 0.8602150537634409, "percentage": 28.67, "elapsed_time": "2:44:14", "remaining_time": "6:48:34"}
|
81 |
+
{"current_steps": 81, "total_steps": 279, "loss": 0.5294, "lr": 8.977851933754317e-06, "epoch": 0.8709677419354839, "percentage": 29.03, "elapsed_time": "2:46:25", "remaining_time": "6:46:49"}
|
82 |
+
{"current_steps": 82, "total_steps": 279, "loss": 0.5065, "lr": 8.939625629542401e-06, "epoch": 0.8817204301075269, "percentage": 29.39, "elapsed_time": "2:48:43", "remaining_time": "6:45:20"}
|
83 |
+
{"current_steps": 83, "total_steps": 279, "loss": 0.5577, "lr": 8.900782159906927e-06, "epoch": 0.8924731182795699, "percentage": 29.75, "elapsed_time": "2:50:40", "remaining_time": "6:43:02"}
|
84 |
+
{"current_steps": 84, "total_steps": 279, "loss": 0.5607, "lr": 8.861327609904859e-06, "epoch": 0.9032258064516129, "percentage": 30.11, "elapsed_time": "2:52:59", "remaining_time": "6:41:34"}
|
85 |
+
{"current_steps": 85, "total_steps": 279, "loss": 0.4559, "lr": 8.821268160322482e-06, "epoch": 0.9139784946236559, "percentage": 30.47, "elapsed_time": "2:54:42", "remaining_time": "6:38:44"}
|
86 |
+
{"current_steps": 86, "total_steps": 279, "loss": 0.4524, "lr": 8.780610086707149e-06, "epoch": 0.9247311827956989, "percentage": 30.82, "elapsed_time": "2:56:47", "remaining_time": "6:36:45"}
|
87 |
+
{"current_steps": 87, "total_steps": 279, "loss": 0.5183, "lr": 8.739359758384162e-06, "epoch": 0.9354838709677419, "percentage": 31.18, "elapsed_time": "2:58:34", "remaining_time": "6:34:04"}
|
88 |
+
{"current_steps": 88, "total_steps": 279, "loss": 0.6781, "lr": 8.697523637458997e-06, "epoch": 0.946236559139785, "percentage": 31.54, "elapsed_time": "3:00:19", "remaining_time": "6:31:23"}
|
89 |
+
{"current_steps": 89, "total_steps": 279, "loss": 0.4719, "lr": 8.655108277804975e-06, "epoch": 0.956989247311828, "percentage": 31.9, "elapsed_time": "3:02:00", "remaining_time": "6:28:33"}
|
90 |
+
{"current_steps": 90, "total_steps": 279, "loss": 0.5298, "lr": 8.612120324036548e-06, "epoch": 0.967741935483871, "percentage": 32.26, "elapsed_time": "3:04:09", "remaining_time": "6:26:42"}
|
91 |
+
{"current_steps": 91, "total_steps": 279, "loss": 0.4908, "lr": 8.568566510468392e-06, "epoch": 0.978494623655914, "percentage": 32.62, "elapsed_time": "3:06:25", "remaining_time": "6:25:08"}
|
92 |
+
{"current_steps": 92, "total_steps": 279, "loss": 0.4884, "lr": 8.524453660060434e-06, "epoch": 0.989247311827957, "percentage": 32.97, "elapsed_time": "3:08:55", "remaining_time": "6:24:00"}
|
93 |
+
{"current_steps": 93, "total_steps": 279, "loss": 0.5282, "lr": 8.479788683348996e-06, "epoch": 1.0, "percentage": 33.33, "elapsed_time": "3:10:56", "remaining_time": "6:21:52"}
|
94 |
+
{"current_steps": 94, "total_steps": 279, "loss": 0.4053, "lr": 8.434578577364218e-06, "epoch": 1.010752688172043, "percentage": 33.69, "elapsed_time": "3:13:45", "remaining_time": "6:21:19"}
|
95 |
+
{"current_steps": 95, "total_steps": 279, "loss": 0.337, "lr": 8.388830424533935e-06, "epoch": 1.021505376344086, "percentage": 34.05, "elapsed_time": "3:15:53", "remaining_time": "6:19:23"}
|
96 |
+
{"current_steps": 96, "total_steps": 279, "loss": 0.3569, "lr": 8.342551391574165e-06, "epoch": 1.032258064516129, "percentage": 34.41, "elapsed_time": "3:17:24", "remaining_time": "6:16:18"}
|
97 |
+
{"current_steps": 97, "total_steps": 279, "loss": 0.3488, "lr": 8.295748728366414e-06, "epoch": 1.043010752688172, "percentage": 34.77, "elapsed_time": "3:19:19", "remaining_time": "6:14:00"}
|
98 |
+
{"current_steps": 98, "total_steps": 279, "loss": 0.387, "lr": 8.248429766821925e-06, "epoch": 1.053763440860215, "percentage": 35.13, "elapsed_time": "3:21:21", "remaining_time": "6:11:53"}
|
99 |
+
{"current_steps": 99, "total_steps": 279, "loss": 0.3476, "lr": 8.200601919733106e-06, "epoch": 1.064516129032258, "percentage": 35.48, "elapsed_time": "3:23:17", "remaining_time": "6:09:37"}
|
100 |
+
{"current_steps": 100, "total_steps": 279, "loss": 0.338, "lr": 8.15227267961226e-06, "epoch": 1.075268817204301, "percentage": 35.84, "elapsed_time": "3:25:09", "remaining_time": "6:07:14"}
|
101 |
+
{"current_steps": 101, "total_steps": 279, "loss": 0.3756, "lr": 8.10344961751785e-06, "epoch": 1.086021505376344, "percentage": 36.2, "elapsed_time": "3:27:17", "remaining_time": "6:05:18"}
|
102 |
+
{"current_steps": 102, "total_steps": 279, "loss": 0.3643, "lr": 8.054140381868435e-06, "epoch": 1.096774193548387, "percentage": 36.56, "elapsed_time": "3:29:59", "remaining_time": "6:04:23"}
|
103 |
+
{"current_steps": 103, "total_steps": 279, "loss": 0.3537, "lr": 8.004352697244516e-06, "epoch": 1.10752688172043, "percentage": 36.92, "elapsed_time": "3:32:12", "remaining_time": "6:02:35"}
|
104 |
+
{"current_steps": 104, "total_steps": 279, "loss": 0.3851, "lr": 7.954094363178421e-06, "epoch": 1.118279569892473, "percentage": 37.28, "elapsed_time": "3:34:23", "remaining_time": "6:00:45"}
|
105 |
+
{"current_steps": 105, "total_steps": 279, "loss": 0.3236, "lr": 7.903373252932474e-06, "epoch": 1.129032258064516, "percentage": 37.63, "elapsed_time": "3:36:24", "remaining_time": "5:58:36"}
|
106 |
+
{"current_steps": 106, "total_steps": 279, "loss": 0.4107, "lr": 7.852197312265592e-06, "epoch": 1.139784946236559, "percentage": 37.99, "elapsed_time": "3:38:13", "remaining_time": "5:56:09"}
|
107 |
+
{"current_steps": 107, "total_steps": 279, "loss": 0.3655, "lr": 7.800574558188548e-06, "epoch": 1.1505376344086022, "percentage": 38.35, "elapsed_time": "3:40:28", "remaining_time": "5:54:24"}
|
108 |
+
{"current_steps": 108, "total_steps": 279, "loss": 0.319, "lr": 7.748513077708044e-06, "epoch": 1.1612903225806452, "percentage": 38.71, "elapsed_time": "3:42:59", "remaining_time": "5:53:04"}
|
109 |
+
{"current_steps": 109, "total_steps": 279, "loss": 0.3257, "lr": 7.69602102655985e-06, "epoch": 1.1720430107526882, "percentage": 39.07, "elapsed_time": "3:44:39", "remaining_time": "5:50:22"}
|
110 |
+
{"current_steps": 110, "total_steps": 279, "loss": 0.5513, "lr": 7.643106627931148e-06, "epoch": 1.1827956989247312, "percentage": 39.43, "elapsed_time": "3:46:49", "remaining_time": "5:48:29"}
|
111 |
+
{"current_steps": 111, "total_steps": 279, "loss": 0.3581, "lr": 7.5897781711723215e-06, "epoch": 1.1935483870967742, "percentage": 39.78, "elapsed_time": "3:48:33", "remaining_time": "5:45:55"}
|
112 |
+
{"current_steps": 112, "total_steps": 279, "loss": 0.3513, "lr": 7.536044010498396e-06, "epoch": 1.2043010752688172, "percentage": 40.14, "elapsed_time": "3:50:45", "remaining_time": "5:44:04"}
|
113 |
+
{"current_steps": 113, "total_steps": 279, "loss": 0.3443, "lr": 7.48191256368028e-06, "epoch": 1.2150537634408602, "percentage": 40.5, "elapsed_time": "3:52:56", "remaining_time": "5:42:11"}
|
114 |
+
{"current_steps": 114, "total_steps": 279, "loss": 0.3603, "lr": 7.427392310726088e-06, "epoch": 1.2258064516129032, "percentage": 40.86, "elapsed_time": "3:55:41", "remaining_time": "5:41:08"}
|
115 |
+
{"current_steps": 115, "total_steps": 279, "loss": 0.3738, "lr": 7.372491792552694e-06, "epoch": 1.2365591397849462, "percentage": 41.22, "elapsed_time": "3:57:54", "remaining_time": "5:39:16"}
|
116 |
+
{"current_steps": 116, "total_steps": 279, "loss": 0.3881, "lr": 7.31721960964774e-06, "epoch": 1.2473118279569892, "percentage": 41.58, "elapsed_time": "4:00:03", "remaining_time": "5:37:18"}
|
117 |
+
{"current_steps": 117, "total_steps": 279, "loss": 0.5288, "lr": 7.261584420722328e-06, "epoch": 1.2580645161290323, "percentage": 41.94, "elapsed_time": "4:02:35", "remaining_time": "5:35:53"}
|
118 |
+
{"current_steps": 118, "total_steps": 279, "loss": 0.3781, "lr": 7.20559494135458e-06, "epoch": 1.2688172043010753, "percentage": 42.29, "elapsed_time": "4:04:35", "remaining_time": "5:33:42"}
|
119 |
+
{"current_steps": 119, "total_steps": 279, "loss": 0.3717, "lr": 7.149259942624287e-06, "epoch": 1.2795698924731183, "percentage": 42.65, "elapsed_time": "4:06:22", "remaining_time": "5:31:15"}
|
120 |
+
{"current_steps": 120, "total_steps": 279, "loss": 0.373, "lr": 7.092588249738871e-06, "epoch": 1.2903225806451613, "percentage": 43.01, "elapsed_time": "4:08:15", "remaining_time": "5:28:55"}
|
121 |
+
{"current_steps": 121, "total_steps": 279, "loss": 0.3809, "lr": 7.03558874065087e-06, "epoch": 1.3010752688172043, "percentage": 43.37, "elapsed_time": "4:10:09", "remaining_time": "5:26:39"}
|
122 |
+
{"current_steps": 122, "total_steps": 279, "loss": 0.3583, "lr": 6.978270344667143e-06, "epoch": 1.3118279569892473, "percentage": 43.73, "elapsed_time": "4:12:05", "remaining_time": "5:24:24"}
|
123 |
+
{"current_steps": 123, "total_steps": 279, "loss": 0.3207, "lr": 6.920642041050055e-06, "epoch": 1.3225806451612903, "percentage": 44.09, "elapsed_time": "4:14:14", "remaining_time": "5:22:27"}
|
124 |
+
{"current_steps": 124, "total_steps": 279, "loss": 0.3633, "lr": 6.862712857610812e-06, "epoch": 1.3333333333333333, "percentage": 44.44, "elapsed_time": "4:16:05", "remaining_time": "5:20:06"}
|
125 |
+
{"current_steps": 125, "total_steps": 279, "loss": 0.4274, "lr": 6.804491869295207e-06, "epoch": 1.3440860215053765, "percentage": 44.8, "elapsed_time": "4:18:13", "remaining_time": "5:18:07"}
|
126 |
+
{"current_steps": 126, "total_steps": 279, "loss": 0.3779, "lr": 6.745988196761976e-06, "epoch": 1.3548387096774195, "percentage": 45.16, "elapsed_time": "4:19:53", "remaining_time": "5:15:35"}
|
127 |
+
{"current_steps": 127, "total_steps": 279, "loss": 0.3618, "lr": 6.687211004953992e-06, "epoch": 1.3655913978494625, "percentage": 45.52, "elapsed_time": "4:22:21", "remaining_time": "5:14:00"}
|
128 |
+
{"current_steps": 128, "total_steps": 279, "loss": 0.3347, "lr": 6.628169501662527e-06, "epoch": 1.3763440860215055, "percentage": 45.88, "elapsed_time": "4:24:20", "remaining_time": "5:11:49"}
|
129 |
+
{"current_steps": 129, "total_steps": 279, "loss": 0.3924, "lr": 6.568872936084789e-06, "epoch": 1.3870967741935485, "percentage": 46.24, "elapsed_time": "4:26:11", "remaining_time": "5:09:32"}
|
130 |
+
{"current_steps": 130, "total_steps": 279, "loss": 0.3349, "lr": 6.509330597374993e-06, "epoch": 1.3978494623655915, "percentage": 46.59, "elapsed_time": "4:27:59", "remaining_time": "5:07:09"}
|
131 |
+
{"current_steps": 131, "total_steps": 279, "loss": 0.383, "lr": 6.44955181318915e-06, "epoch": 1.4086021505376345, "percentage": 46.95, "elapsed_time": "4:29:41", "remaining_time": "5:04:41"}
|
132 |
+
{"current_steps": 132, "total_steps": 279, "loss": 0.35, "lr": 6.389545948223841e-06, "epoch": 1.4193548387096775, "percentage": 47.31, "elapsed_time": "4:31:56", "remaining_time": "5:02:50"}
|
133 |
+
{"current_steps": 133, "total_steps": 279, "loss": 0.36, "lr": 6.329322402749181e-06, "epoch": 1.4301075268817205, "percentage": 47.67, "elapsed_time": "4:33:34", "remaining_time": "5:00:18"}
|
134 |
+
{"current_steps": 134, "total_steps": 279, "loss": 0.366, "lr": 6.2688906111362115e-06, "epoch": 1.4408602150537635, "percentage": 48.03, "elapsed_time": "4:36:26", "remaining_time": "4:59:08"}
|
135 |
+
{"current_steps": 135, "total_steps": 279, "loss": 0.3497, "lr": 6.208260040378946e-06, "epoch": 1.4516129032258065, "percentage": 48.39, "elapsed_time": "4:38:37", "remaining_time": "4:57:12"}
|
136 |
+
{"current_steps": 136, "total_steps": 279, "loss": 0.3537, "lr": 6.147440188611324e-06, "epoch": 1.4623655913978495, "percentage": 48.75, "elapsed_time": "4:40:27", "remaining_time": "4:54:53"}
|
137 |
+
{"current_steps": 137, "total_steps": 279, "loss": 0.3527, "lr": 6.0864405836192575e-06, "epoch": 1.4731182795698925, "percentage": 49.1, "elapsed_time": "4:42:27", "remaining_time": "4:52:46"}
|
138 |
+
{"current_steps": 138, "total_steps": 279, "loss": 0.3378, "lr": 6.025270781348055e-06, "epoch": 1.4838709677419355, "percentage": 49.46, "elapsed_time": "4:44:38", "remaining_time": "4:50:50"}
|
139 |
+
{"current_steps": 139, "total_steps": 279, "loss": 0.3696, "lr": 5.963940364405425e-06, "epoch": 1.4946236559139785, "percentage": 49.82, "elapsed_time": "4:46:50", "remaining_time": "4:48:53"}
|
140 |
+
{"current_steps": 140, "total_steps": 279, "loss": 0.3738, "lr": 5.902458940560304e-06, "epoch": 1.5053763440860215, "percentage": 50.18, "elapsed_time": "4:49:19", "remaining_time": "4:47:15"}
|
141 |
+
{"current_steps": 141, "total_steps": 279, "loss": 0.3335, "lr": 5.8408361412377475e-06, "epoch": 1.5161290322580645, "percentage": 50.54, "elapsed_time": "4:51:34", "remaining_time": "4:45:21"}
|
142 |
+
{"current_steps": 142, "total_steps": 279, "loss": 0.3232, "lr": 5.779081620010104e-06, "epoch": 1.5268817204301075, "percentage": 50.9, "elapsed_time": "4:53:40", "remaining_time": "4:43:20"}
|
143 |
+
{"current_steps": 143, "total_steps": 279, "loss": 0.3392, "lr": 5.717205051084731e-06, "epoch": 1.5376344086021505, "percentage": 51.25, "elapsed_time": "4:55:21", "remaining_time": "4:40:53"}
|
144 |
+
{"current_steps": 144, "total_steps": 279, "loss": 0.3711, "lr": 5.655216127788472e-06, "epoch": 1.5483870967741935, "percentage": 51.61, "elapsed_time": "4:57:17", "remaining_time": "4:38:43"}
|
145 |
+
{"current_steps": 145, "total_steps": 279, "loss": 0.3435, "lr": 5.593124561049141e-06, "epoch": 1.5591397849462365, "percentage": 51.97, "elapsed_time": "4:59:43", "remaining_time": "4:36:58"}
|
146 |
+
{"current_steps": 146, "total_steps": 279, "loss": 0.3617, "lr": 5.530940077874248e-06, "epoch": 1.5698924731182795, "percentage": 52.33, "elapsed_time": "5:01:39", "remaining_time": "4:34:48"}
|
147 |
+
{"current_steps": 147, "total_steps": 279, "loss": 0.3399, "lr": 5.468672419827208e-06, "epoch": 1.5806451612903225, "percentage": 52.69, "elapsed_time": "5:03:43", "remaining_time": "4:32:43"}
|
148 |
+
{"current_steps": 148, "total_steps": 279, "loss": 0.339, "lr": 5.406331341501264e-06, "epoch": 1.5913978494623655, "percentage": 53.05, "elapsed_time": "5:05:34", "remaining_time": "4:30:28"}
|
149 |
+
{"current_steps": 149, "total_steps": 279, "loss": 0.3207, "lr": 5.34392660899138e-06, "epoch": 1.6021505376344085, "percentage": 53.41, "elapsed_time": "5:07:13", "remaining_time": "4:28:02"}
|
150 |
+
{"current_steps": 150, "total_steps": 279, "loss": 0.3308, "lr": 5.281467998364314e-06, "epoch": 1.6129032258064515, "percentage": 53.76, "elapsed_time": "5:08:37", "remaining_time": "4:25:25"}
|
151 |
+
{"current_steps": 151, "total_steps": 279, "loss": 0.3501, "lr": 5.218965294127155e-06, "epoch": 1.6236559139784945, "percentage": 54.12, "elapsed_time": "5:10:40", "remaining_time": "4:23:21"}
|
152 |
+
{"current_steps": 152, "total_steps": 279, "loss": 0.3542, "lr": 5.156428287694508e-06, "epoch": 1.6344086021505375, "percentage": 54.48, "elapsed_time": "5:12:42", "remaining_time": "4:21:16"}
|
153 |
+
{"current_steps": 153, "total_steps": 279, "loss": 0.334, "lr": 5.093866775854618e-06, "epoch": 1.6451612903225805, "percentage": 54.84, "elapsed_time": "5:14:14", "remaining_time": "4:18:47"}
|
154 |
+
{"current_steps": 154, "total_steps": 279, "loss": 0.3124, "lr": 5.03129055923465e-06, "epoch": 1.6559139784946235, "percentage": 55.2, "elapsed_time": "5:16:14", "remaining_time": "4:16:41"}
|
155 |
+
{"current_steps": 155, "total_steps": 279, "loss": 0.3608, "lr": 4.968709440765352e-06, "epoch": 1.6666666666666665, "percentage": 55.56, "elapsed_time": "5:18:04", "remaining_time": "4:14:27"}
|
156 |
+
{"current_steps": 156, "total_steps": 279, "loss": 0.4535, "lr": 4.906133224145384e-06, "epoch": 1.6774193548387095, "percentage": 55.91, "elapsed_time": "5:20:00", "remaining_time": "4:12:18"}
|
157 |
+
{"current_steps": 157, "total_steps": 279, "loss": 0.347, "lr": 4.843571712305493e-06, "epoch": 1.6881720430107527, "percentage": 56.27, "elapsed_time": "5:22:44", "remaining_time": "4:10:47"}
|
158 |
+
{"current_steps": 158, "total_steps": 279, "loss": 0.3429, "lr": 4.781034705872846e-06, "epoch": 1.6989247311827957, "percentage": 56.63, "elapsed_time": "5:24:40", "remaining_time": "4:08:38"}
|
159 |
+
{"current_steps": 159, "total_steps": 279, "loss": 0.353, "lr": 4.7185320016356865e-06, "epoch": 1.7096774193548387, "percentage": 56.99, "elapsed_time": "5:26:42", "remaining_time": "4:06:34"}
|
160 |
+
{"current_steps": 160, "total_steps": 279, "loss": 0.3955, "lr": 4.656073391008622e-06, "epoch": 1.7204301075268817, "percentage": 57.35, "elapsed_time": "5:28:39", "remaining_time": "4:04:26"}
|
161 |
+
{"current_steps": 161, "total_steps": 279, "loss": 0.3522, "lr": 4.593668658498737e-06, "epoch": 1.7311827956989247, "percentage": 57.71, "elapsed_time": "5:30:19", "remaining_time": "4:02:05"}
|
162 |
+
{"current_steps": 162, "total_steps": 279, "loss": 0.3609, "lr": 4.531327580172794e-06, "epoch": 1.7419354838709677, "percentage": 58.06, "elapsed_time": "5:32:26", "remaining_time": "4:00:05"}
|
163 |
+
{"current_steps": 163, "total_steps": 279, "loss": 0.3518, "lr": 4.469059922125753e-06, "epoch": 1.7526881720430108, "percentage": 58.42, "elapsed_time": "5:34:19", "remaining_time": "3:57:55"}
|
164 |
+
{"current_steps": 164, "total_steps": 279, "loss": 0.4151, "lr": 4.4068754389508616e-06, "epoch": 1.7634408602150538, "percentage": 58.78, "elapsed_time": "5:36:06", "remaining_time": "3:55:41"}
|
165 |
+
{"current_steps": 165, "total_steps": 279, "loss": 0.348, "lr": 4.34478387221153e-06, "epoch": 1.7741935483870968, "percentage": 59.14, "elapsed_time": "5:37:53", "remaining_time": "3:53:26"}
|
166 |
+
{"current_steps": 166, "total_steps": 279, "loss": 0.3336, "lr": 4.282794948915271e-06, "epoch": 1.7849462365591398, "percentage": 59.5, "elapsed_time": "5:39:44", "remaining_time": "3:51:16"}
|
167 |
+
{"current_steps": 167, "total_steps": 279, "loss": 0.3815, "lr": 4.220918379989898e-06, "epoch": 1.7956989247311828, "percentage": 59.86, "elapsed_time": "5:41:49", "remaining_time": "3:49:14"}
|
168 |
+
{"current_steps": 168, "total_steps": 279, "loss": 0.3284, "lr": 4.159163858762255e-06, "epoch": 1.8064516129032258, "percentage": 60.22, "elapsed_time": "5:44:07", "remaining_time": "3:47:22"}
|
169 |
+
{"current_steps": 169, "total_steps": 279, "loss": 0.3662, "lr": 4.097541059439698e-06, "epoch": 1.817204301075269, "percentage": 60.57, "elapsed_time": "5:47:02", "remaining_time": "3:45:52"}
|
170 |
+
{"current_steps": 170, "total_steps": 279, "loss": 0.3589, "lr": 4.036059635594578e-06, "epoch": 1.827956989247312, "percentage": 60.93, "elapsed_time": "5:48:55", "remaining_time": "3:43:43"}
|
171 |
+
{"current_steps": 171, "total_steps": 279, "loss": 0.3249, "lr": 3.974729218651946e-06, "epoch": 1.838709677419355, "percentage": 61.29, "elapsed_time": "5:51:02", "remaining_time": "3:41:42"}
|
172 |
+
{"current_steps": 172, "total_steps": 279, "loss": 0.3605, "lr": 3.913559416380743e-06, "epoch": 1.849462365591398, "percentage": 61.65, "elapsed_time": "5:53:14", "remaining_time": "3:39:44"}
|
173 |
+
{"current_steps": 173, "total_steps": 279, "loss": 0.3622, "lr": 3.852559811388676e-06, "epoch": 1.860215053763441, "percentage": 62.01, "elapsed_time": "5:55:14", "remaining_time": "3:37:39"}
|
174 |
+
{"current_steps": 174, "total_steps": 279, "loss": 0.3159, "lr": 3.791739959621054e-06, "epoch": 1.870967741935484, "percentage": 62.37, "elapsed_time": "5:56:59", "remaining_time": "3:35:25"}
|
175 |
+
{"current_steps": 175, "total_steps": 279, "loss": 0.37, "lr": 3.7311093888637906e-06, "epoch": 1.881720430107527, "percentage": 62.72, "elapsed_time": "5:58:55", "remaining_time": "3:33:18"}
|
176 |
+
{"current_steps": 176, "total_steps": 279, "loss": 0.3628, "lr": 3.670677597250819e-06, "epoch": 1.89247311827957, "percentage": 63.08, "elapsed_time": "6:00:47", "remaining_time": "3:31:08"}
|
177 |
+
{"current_steps": 177, "total_steps": 279, "loss": 0.3525, "lr": 3.6104540517761594e-06, "epoch": 1.903225806451613, "percentage": 63.44, "elapsed_time": "6:02:27", "remaining_time": "3:28:52"}
|
178 |
+
{"current_steps": 178, "total_steps": 279, "loss": 0.3224, "lr": 3.55044818681085e-06, "epoch": 1.913978494623656, "percentage": 63.8, "elapsed_time": "6:04:13", "remaining_time": "3:26:40"}
|
179 |
+
{"current_steps": 179, "total_steps": 279, "loss": 0.3474, "lr": 3.4906694026250075e-06, "epoch": 1.924731182795699, "percentage": 64.16, "elapsed_time": "6:06:39", "remaining_time": "3:24:49"}
|
180 |
+
{"current_steps": 180, "total_steps": 279, "loss": 0.3339, "lr": 3.431127063915213e-06, "epoch": 1.935483870967742, "percentage": 64.52, "elapsed_time": "6:08:17", "remaining_time": "3:22:33"}
|
181 |
+
{"current_steps": 181, "total_steps": 279, "loss": 0.3123, "lr": 3.371830498337475e-06, "epoch": 1.946236559139785, "percentage": 64.87, "elapsed_time": "6:10:39", "remaining_time": "3:20:41"}
|
182 |
+
{"current_steps": 182, "total_steps": 279, "loss": 0.343, "lr": 3.3127889950460094e-06, "epoch": 1.956989247311828, "percentage": 65.23, "elapsed_time": "6:12:55", "remaining_time": "3:18:45"}
|
183 |
+
{"current_steps": 183, "total_steps": 279, "loss": 0.3402, "lr": 3.254011803238026e-06, "epoch": 1.967741935483871, "percentage": 65.59, "elapsed_time": "6:14:37", "remaining_time": "3:16:31"}
|
184 |
+
{"current_steps": 184, "total_steps": 279, "loss": 0.3766, "lr": 3.195508130704795e-06, "epoch": 1.978494623655914, "percentage": 65.95, "elapsed_time": "6:17:05", "remaining_time": "3:14:41"}
|
185 |
+
{"current_steps": 185, "total_steps": 279, "loss": 0.3827, "lr": 3.1372871423891894e-06, "epoch": 1.989247311827957, "percentage": 66.31, "elapsed_time": "6:19:23", "remaining_time": "3:12:46"}
|
186 |
+
{"current_steps": 186, "total_steps": 279, "loss": 0.3326, "lr": 3.079357958949946e-06, "epoch": 2.0, "percentage": 66.67, "elapsed_time": "6:21:18", "remaining_time": "3:10:39"}
|
187 |
+
{"current_steps": 187, "total_steps": 279, "loss": 0.2291, "lr": 3.021729655332858e-06, "epoch": 2.010752688172043, "percentage": 67.03, "elapsed_time": "6:22:52", "remaining_time": "3:08:21"}
|
188 |
+
{"current_steps": 188, "total_steps": 279, "loss": 0.2793, "lr": 2.9644112593491315e-06, "epoch": 2.021505376344086, "percentage": 67.38, "elapsed_time": "6:24:51", "remaining_time": "3:06:17"}
|
189 |
+
{"current_steps": 189, "total_steps": 279, "loss": 0.2318, "lr": 2.90741175026113e-06, "epoch": 2.032258064516129, "percentage": 67.74, "elapsed_time": "6:26:16", "remaining_time": "3:03:56"}
|
190 |
+
{"current_steps": 190, "total_steps": 279, "loss": 0.2268, "lr": 2.850740057375716e-06, "epoch": 2.043010752688172, "percentage": 68.1, "elapsed_time": "6:28:21", "remaining_time": "3:01:55"}
|
191 |
+
{"current_steps": 191, "total_steps": 279, "loss": 0.2683, "lr": 2.7944050586454215e-06, "epoch": 2.053763440860215, "percentage": 68.46, "elapsed_time": "6:30:21", "remaining_time": "2:59:50"}
|
192 |
+
{"current_steps": 192, "total_steps": 279, "loss": 0.2025, "lr": 2.7384155792776724e-06, "epoch": 2.064516129032258, "percentage": 68.82, "elapsed_time": "6:31:47", "remaining_time": "2:57:31"}
|
193 |
+
{"current_steps": 193, "total_steps": 279, "loss": 0.2393, "lr": 2.682780390352262e-06, "epoch": 2.075268817204301, "percentage": 69.18, "elapsed_time": "6:33:30", "remaining_time": "2:55:20"}
|
194 |
+
{"current_steps": 194, "total_steps": 279, "loss": 0.2294, "lr": 2.627508207447308e-06, "epoch": 2.086021505376344, "percentage": 69.53, "elapsed_time": "6:35:12", "remaining_time": "2:53:09"}
|
195 |
+
{"current_steps": 195, "total_steps": 279, "loss": 0.2282, "lr": 2.5726076892739127e-06, "epoch": 2.096774193548387, "percentage": 69.89, "elapsed_time": "6:36:53", "remaining_time": "2:50:58"}
|
196 |
+
{"current_steps": 196, "total_steps": 279, "loss": 0.4074, "lr": 2.5180874363197217e-06, "epoch": 2.10752688172043, "percentage": 70.25, "elapsed_time": "6:38:26", "remaining_time": "2:48:43"}
|
197 |
+
{"current_steps": 197, "total_steps": 279, "loss": 0.2049, "lr": 2.463955989501607e-06, "epoch": 2.118279569892473, "percentage": 70.61, "elapsed_time": "6:40:48", "remaining_time": "2:46:49"}
|
198 |
+
{"current_steps": 198, "total_steps": 279, "loss": 0.2867, "lr": 2.41022182882768e-06, "epoch": 2.129032258064516, "percentage": 70.97, "elapsed_time": "6:42:30", "remaining_time": "2:44:39"}
|
199 |
+
{"current_steps": 199, "total_steps": 279, "loss": 0.2632, "lr": 2.356893372068855e-06, "epoch": 2.139784946236559, "percentage": 71.33, "elapsed_time": "6:44:51", "remaining_time": "2:42:45"}
|
200 |
+
{"current_steps": 200, "total_steps": 279, "loss": 0.2136, "lr": 2.3039789734401524e-06, "epoch": 2.150537634408602, "percentage": 71.68, "elapsed_time": "6:47:58", "remaining_time": "2:41:09"}
|
201 |
+
{"current_steps": 201, "total_steps": 279, "loss": 0.2032, "lr": 2.251486922291957e-06, "epoch": 2.161290322580645, "percentage": 72.04, "elapsed_time": "6:52:09", "remaining_time": "2:39:56"}
|
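Each trainer_log.jsonl record above is one flat JSON object per logged step (step index, running loss, learning rate, fractional epoch, percentage done, and wall-clock timings). A minimal sketch, not part of this commit, for reading the loss curve from a locally downloaded copy of the file using only the Python standard library:

import json

# Assumes trainer_log.jsonl has been downloaded next to this script.
steps, losses = [], []
with open("trainer_log.jsonl") as f:
    for line in f:
        record = json.loads(line)
        if "loss" in record:  # guard: summary entries may omit the loss field
            steps.append(record["current_steps"])
            losses.append(record["loss"])

print(f"logged steps {steps[0]}..{steps[-1]}, last loss {losses[-1]:.4f}")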
training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1687b082a1467c3ecffda1f0b9c769eac9bdc0904cd2f1c065471f6dc44d008e
+size 7928
vocab.json
ADDED
The diff for this file is too large to render.
See raw diff