Training in progress, step 200
- .gitattributes +1 -0
- config.json +30 -0
- model-00001-of-00004.safetensors +3 -0
- model-00002-of-00004.safetensors +3 -0
- model-00003-of-00004.safetensors +3 -0
- model-00004-of-00004.safetensors +3 -0
- model.safetensors.index.json +346 -0
- special_tokens_map.json +32 -0
- tokenizer.json +3 -0
- tokenizer_config.json +208 -0
- trainer_log.jsonl +212 -0
- training_args.bin +3 -0
.gitattributes
CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
config.json
ADDED
@@ -0,0 +1,30 @@
+{
+  "_name_or_path": "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B",
+  "architectures": [
+    "Qwen2ForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "eos_token_id": 151643,
+  "hidden_act": "silu",
+  "hidden_size": 3584,
+  "initializer_range": 0.02,
+  "intermediate_size": 18944,
+  "max_position_embeddings": 131072,
+  "max_window_layers": 28,
+  "model_type": "qwen2",
+  "num_attention_heads": 28,
+  "num_hidden_layers": 28,
+  "num_key_value_heads": 4,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": null,
+  "rope_theta": 10000,
+  "sliding_window": null,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.48.3",
+  "use_cache": false,
+  "use_mrope": false,
+  "use_sliding_window": false,
+  "vocab_size": 152064
+}
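This config is what `transformers` reads when the checkpoint is loaded. A minimal sketch of inspecting it, assuming the `transformers` library is installed and the repo is checked out locally (the path `./checkpoint-200` is hypothetical):

```python
# Minimal sketch: inspect the architecture described by config.json.
# "./checkpoint-200" is a hypothetical local path to this repo's files.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("./checkpoint-200")
print(config.model_type)           # "qwen2"
print(config.hidden_size)          # 3584
print(config.num_hidden_layers)    # 28
print(config.num_key_value_heads)  # 4 (grouped-query attention: 28 query heads share 4 KV heads)
```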
model-00001-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:02b7a9768f9ccb64a9ac9beab1cbc721afd7bb790912172166d822693fbc5edb
+size 4877660776
model-00002-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:572d32f9248ba1415e1463aab55ae2896891a7563f3975adcb69a6535c17e597
+size 4932751008
model-00003-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b69e957d4b9b17b6b5dffc7fb794cb2fe137834bca12bce5328c0b1294021879
+size 4330865200
model-00004-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b8d2721388940e1ad9924dad44372b9902a00053aceef5e34c24f66d74a826c7
+size 1089994880
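The four `.safetensors` entries above are Git LFS pointer files, not the weights themselves; each records only the SHA-256 and byte size of the real shard. A small sketch (standard library only) for sanity-checking a downloaded shard against its pointer, using the values from the first diff above:

```python
# Verify a downloaded shard against its LFS pointer (sha256 + size).
import hashlib
import os

path = "model-00001-of-00004.safetensors"
h = hashlib.sha256()
with open(path, "rb") as f:
    # Hash in 1 MiB chunks to avoid loading ~4.9 GB into memory.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
print(h.hexdigest())          # expect 02b7a976... per the pointer
print(os.path.getsize(path))  # expect 4877660776
```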
model.safetensors.index.json
ADDED
@@ -0,0 +1,346 @@
+{
+  "metadata": {
+    "total_size": 15231233024
+  },
+  "weight_map": {
+    "lm_head.weight": "model-00004-of-00004.safetensors",
+    "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.18.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.norm.weight": "model-00003-of-00004.safetensors"
+  }
+}
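The index maps each tensor name to the shard that stores it; `from_pretrained` consults it to pull the four shards together transparently. A short sketch (standard library only) that tallies how many tensors live in each shard:

```python
# Summarize the shard layout recorded in model.safetensors.index.json.
import json
from collections import Counter

with open("model.safetensors.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])        # 15231233024 bytes (~15.2 GB)
print(Counter(index["weight_map"].values()))  # tensors per shard
```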
special_tokens_map.json
ADDED
@@ -0,0 +1,32 @@
+{
+  "additional_special_tokens": [
+    {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    }
+  ],
+  "bos_token": {
+    "content": "<|begin▁of▁sentence|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "<|end▁of▁sentence|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<|end▁of▁sentence|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
tokenizer.json
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:02643f00207dfc5ed248992486bde04314c21dca556bf65ce520690962b8db63
+size 11422965
tokenizer_config.json
ADDED
@@ -0,0 +1,208 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "add_prefix_space": null,
+  "added_tokens_decoder": {
+    "151643": {
+      "content": "<|end▁of▁sentence|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151644": {
+      "content": "<|User|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151645": {
+      "content": "<|Assistant|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151646": {
+      "content": "<|begin▁of▁sentence|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151647": {
+      "content": "<|EOT|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151648": {
+      "content": "<think>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151649": {
+      "content": "</think>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151650": {
+      "content": "<|quad_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151651": {
+      "content": "<|quad_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151652": {
+      "content": "<|vision_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151653": {
+      "content": "<|vision_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151654": {
+      "content": "<|vision_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151655": {
+      "content": "<|image_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151656": {
+      "content": "<|video_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151657": {
+      "content": "<tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151658": {
+      "content": "</tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151659": {
+      "content": "<|fim_prefix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151660": {
+      "content": "<|fim_middle|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151661": {
+      "content": "<|fim_suffix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151662": {
+      "content": "<|fim_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151663": {
+      "content": "<|repo_name|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151664": {
+      "content": "<|file_sep|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151665": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "additional_special_tokens": [
+    "<|im_end|>"
+  ],
+  "bos_token": "<|begin▁of▁sentence|>",
+  "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% set ns = namespace(is_first=false, is_tool=false, is_output_first=true, system_prompt='') %}{%- for message in messages %}{%- if message['role'] == 'system' %}{% set ns.system_prompt = message['content'] %}{%- endif %}{%- endfor %}{{bos_token}}{{ns.system_prompt}}{%- for message in messages %}{%- if message['role'] == 'user' %}{%- set ns.is_tool = false -%}{{'<|User|>' + message['content']}}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is none %}{%- set ns.is_tool = false -%}{%- for tool in message['tool_calls']%}{%- if not ns.is_first %}{{'<|Assistant|><|tool▁calls▁begin|><|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{%- set ns.is_first = true -%}{%- else %}{{'\\n' + '<|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{{'<|tool▁calls▁end|><|end▁of▁sentence|>'}}{%- endif %}{%- endfor %}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is not none %}{%- if ns.is_tool %}{{'<|tool▁outputs▁end|>' + message['content'] + '<|end▁of▁sentence|>'}}{%- set ns.is_tool = false -%}{%- else %}{% set content = message['content'] %}{% if '</think>' in content %}{% set content = content.split('</think>')[-1] %}{% endif %}{{'<|Assistant|>' + content + '<|end▁of▁sentence|>'}}{%- endif %}{%- endif %}{%- if message['role'] == 'tool' %}{%- set ns.is_tool = true -%}{%- if ns.is_output_first %}{{'<|tool▁outputs▁begin|><|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- set ns.is_output_first = false %}{%- else %}{{'\\n<|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- endif %}{%- endif %}{%- endfor -%}{% if ns.is_tool %}{{'<|tool▁outputs▁end|>'}}{% endif %}{% if add_generation_prompt and not ns.is_tool %}{{'<|Assistant|><think>\\n'}}{% endif %}",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "<|end▁of▁sentence|>",
+  "extra_special_tokens": {},
+  "legacy": true,
+  "model_max_length": 16384,
+  "pad_token": "<|end▁of▁sentence|>",
+  "padding_side": "right",
+  "sp_model_kwargs": {},
+  "split_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": null,
+  "use_default_system_prompt": false
+}
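The `chat_template` above is the Jinja template that `apply_chat_template` renders. A minimal sketch of prompting through it, assuming `transformers` is installed and using the same hypothetical local path as before:

```python
# Render a single-turn chat prompt through the template in tokenizer_config.json.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint-200")  # hypothetical path
text = tok.apply_chat_template(
    [{"role": "user", "content": "What is 2 + 2?"}],
    tokenize=False,
    add_generation_prompt=True,
)
# Per the template: bos + "<|User|>..." + "<|Assistant|><think>\n"
print(text)
```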
trainer_log.jsonl
ADDED
@@ -0,0 +1,212 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{"current_steps": 1, "total_steps": 309, "loss": 1.2207, "lr": 3.2258064516129035e-07, "epoch": 0.00963855421686747, "percentage": 0.32, "elapsed_time": "0:00:16", "remaining_time": "1:24:13"}
|
2 |
+
{"current_steps": 2, "total_steps": 309, "loss": 1.1371, "lr": 6.451612903225807e-07, "epoch": 0.01927710843373494, "percentage": 0.65, "elapsed_time": "0:00:26", "remaining_time": "1:08:07"}
|
3 |
+
{"current_steps": 3, "total_steps": 309, "loss": 1.1605, "lr": 9.67741935483871e-07, "epoch": 0.02891566265060241, "percentage": 0.97, "elapsed_time": "0:00:35", "remaining_time": "1:01:09"}
|
4 |
+
{"current_steps": 4, "total_steps": 309, "loss": 1.297, "lr": 1.2903225806451614e-06, "epoch": 0.03855421686746988, "percentage": 1.29, "elapsed_time": "0:00:43", "remaining_time": "0:54:39"}
|
5 |
+
{"current_steps": 5, "total_steps": 309, "loss": 1.2186, "lr": 1.6129032258064516e-06, "epoch": 0.04819277108433735, "percentage": 1.62, "elapsed_time": "0:00:51", "remaining_time": "0:52:05"}
|
6 |
+
{"current_steps": 6, "total_steps": 309, "loss": 1.1326, "lr": 1.935483870967742e-06, "epoch": 0.05783132530120482, "percentage": 1.94, "elapsed_time": "0:01:00", "remaining_time": "0:51:11"}
|
7 |
+
{"current_steps": 7, "total_steps": 309, "loss": 1.1416, "lr": 2.2580645161290324e-06, "epoch": 0.06746987951807229, "percentage": 2.27, "elapsed_time": "0:01:09", "remaining_time": "0:50:16"}
|
8 |
+
{"current_steps": 8, "total_steps": 309, "loss": 1.2283, "lr": 2.580645161290323e-06, "epoch": 0.07710843373493977, "percentage": 2.59, "elapsed_time": "0:01:18", "remaining_time": "0:49:18"}
|
9 |
+
{"current_steps": 9, "total_steps": 309, "loss": 1.1301, "lr": 2.903225806451613e-06, "epoch": 0.08674698795180723, "percentage": 2.91, "elapsed_time": "0:01:28", "remaining_time": "0:49:02"}
|
10 |
+
{"current_steps": 10, "total_steps": 309, "loss": 1.1998, "lr": 3.225806451612903e-06, "epoch": 0.0963855421686747, "percentage": 3.24, "elapsed_time": "0:01:36", "remaining_time": "0:47:51"}
|
11 |
+
{"current_steps": 11, "total_steps": 309, "loss": 1.1185, "lr": 3.548387096774194e-06, "epoch": 0.10602409638554217, "percentage": 3.56, "elapsed_time": "0:01:44", "remaining_time": "0:47:04"}
|
12 |
+
{"current_steps": 12, "total_steps": 309, "loss": 1.0715, "lr": 3.870967741935484e-06, "epoch": 0.11566265060240964, "percentage": 3.88, "elapsed_time": "0:01:51", "remaining_time": "0:45:58"}
|
13 |
+
{"current_steps": 13, "total_steps": 309, "loss": 1.0387, "lr": 4.193548387096774e-06, "epoch": 0.12530120481927712, "percentage": 4.21, "elapsed_time": "0:01:59", "remaining_time": "0:45:21"}
|
14 |
+
{"current_steps": 14, "total_steps": 309, "loss": 1.0138, "lr": 4.516129032258065e-06, "epoch": 0.13493975903614458, "percentage": 4.53, "elapsed_time": "0:02:09", "remaining_time": "0:45:20"}
|
15 |
+
{"current_steps": 15, "total_steps": 309, "loss": 1.0815, "lr": 4.838709677419355e-06, "epoch": 0.14457831325301204, "percentage": 4.85, "elapsed_time": "0:02:18", "remaining_time": "0:45:15"}
|
16 |
+
{"current_steps": 16, "total_steps": 309, "loss": 0.9954, "lr": 5.161290322580646e-06, "epoch": 0.15421686746987953, "percentage": 5.18, "elapsed_time": "0:02:27", "remaining_time": "0:45:08"}
|
17 |
+
{"current_steps": 17, "total_steps": 309, "loss": 0.9345, "lr": 5.483870967741935e-06, "epoch": 0.163855421686747, "percentage": 5.5, "elapsed_time": "0:02:38", "remaining_time": "0:45:24"}
|
18 |
+
{"current_steps": 18, "total_steps": 309, "loss": 1.0057, "lr": 5.806451612903226e-06, "epoch": 0.17349397590361446, "percentage": 5.83, "elapsed_time": "0:02:48", "remaining_time": "0:45:23"}
|
19 |
+
{"current_steps": 19, "total_steps": 309, "loss": 0.961, "lr": 6.129032258064517e-06, "epoch": 0.18313253012048192, "percentage": 6.15, "elapsed_time": "0:02:57", "remaining_time": "0:45:07"}
|
20 |
+
{"current_steps": 20, "total_steps": 309, "loss": 0.9732, "lr": 6.451612903225806e-06, "epoch": 0.1927710843373494, "percentage": 6.47, "elapsed_time": "0:03:06", "remaining_time": "0:44:48"}
|
21 |
+
{"current_steps": 21, "total_steps": 309, "loss": 0.9114, "lr": 6.774193548387097e-06, "epoch": 0.20240963855421687, "percentage": 6.8, "elapsed_time": "0:03:15", "remaining_time": "0:44:40"}
|
22 |
+
{"current_steps": 22, "total_steps": 309, "loss": 0.9238, "lr": 7.096774193548388e-06, "epoch": 0.21204819277108433, "percentage": 7.12, "elapsed_time": "0:03:26", "remaining_time": "0:44:49"}
|
23 |
+
{"current_steps": 23, "total_steps": 309, "loss": 0.9423, "lr": 7.4193548387096784e-06, "epoch": 0.2216867469879518, "percentage": 7.44, "elapsed_time": "0:03:35", "remaining_time": "0:44:33"}
|
24 |
+
{"current_steps": 24, "total_steps": 309, "loss": 0.8987, "lr": 7.741935483870968e-06, "epoch": 0.23132530120481928, "percentage": 7.77, "elapsed_time": "0:03:45", "remaining_time": "0:44:39"}
|
25 |
+
{"current_steps": 25, "total_steps": 309, "loss": 0.923, "lr": 8.064516129032258e-06, "epoch": 0.24096385542168675, "percentage": 8.09, "elapsed_time": "0:03:54", "remaining_time": "0:44:21"}
|
26 |
+
{"current_steps": 26, "total_steps": 309, "loss": 0.8432, "lr": 8.387096774193549e-06, "epoch": 0.25060240963855424, "percentage": 8.41, "elapsed_time": "0:04:04", "remaining_time": "0:44:16"}
|
27 |
+
{"current_steps": 27, "total_steps": 309, "loss": 0.8921, "lr": 8.70967741935484e-06, "epoch": 0.26024096385542167, "percentage": 8.74, "elapsed_time": "0:04:12", "remaining_time": "0:43:56"}
|
28 |
+
{"current_steps": 28, "total_steps": 309, "loss": 0.891, "lr": 9.03225806451613e-06, "epoch": 0.26987951807228916, "percentage": 9.06, "elapsed_time": "0:04:21", "remaining_time": "0:43:41"}
|
29 |
+
{"current_steps": 29, "total_steps": 309, "loss": 0.8328, "lr": 9.35483870967742e-06, "epoch": 0.27951807228915665, "percentage": 9.39, "elapsed_time": "0:04:30", "remaining_time": "0:43:31"}
|
30 |
+
{"current_steps": 30, "total_steps": 309, "loss": 0.8109, "lr": 9.67741935483871e-06, "epoch": 0.2891566265060241, "percentage": 9.71, "elapsed_time": "0:04:40", "remaining_time": "0:43:29"}
|
31 |
+
{"current_steps": 31, "total_steps": 309, "loss": 0.8327, "lr": 1e-05, "epoch": 0.2987951807228916, "percentage": 10.03, "elapsed_time": "0:04:51", "remaining_time": "0:43:31"}
|
32 |
+
{"current_steps": 32, "total_steps": 309, "loss": 0.8228, "lr": 9.999680739242022e-06, "epoch": 0.30843373493975906, "percentage": 10.36, "elapsed_time": "0:05:01", "remaining_time": "0:43:28"}
|
33 |
+
{"current_steps": 33, "total_steps": 309, "loss": 0.8184, "lr": 9.99872299773906e-06, "epoch": 0.3180722891566265, "percentage": 10.68, "elapsed_time": "0:05:09", "remaining_time": "0:43:09"}
|
34 |
+
{"current_steps": 34, "total_steps": 309, "loss": 0.8431, "lr": 9.997126897798826e-06, "epoch": 0.327710843373494, "percentage": 11.0, "elapsed_time": "0:05:18", "remaining_time": "0:42:59"}
|
35 |
+
{"current_steps": 35, "total_steps": 309, "loss": 0.8436, "lr": 9.994892643250147e-06, "epoch": 0.3373493975903614, "percentage": 11.33, "elapsed_time": "0:05:28", "remaining_time": "0:42:53"}
|
36 |
+
{"current_steps": 36, "total_steps": 309, "loss": 0.8124, "lr": 9.99202051941695e-06, "epoch": 0.3469879518072289, "percentage": 11.65, "elapsed_time": "0:05:37", "remaining_time": "0:42:42"}
|
37 |
+
{"current_steps": 37, "total_steps": 309, "loss": 0.8402, "lr": 9.9885108930818e-06, "epoch": 0.3566265060240964, "percentage": 11.97, "elapsed_time": "0:05:46", "remaining_time": "0:42:29"}
|
38 |
+
{"current_steps": 38, "total_steps": 309, "loss": 0.7863, "lr": 9.984364212439089e-06, "epoch": 0.36626506024096384, "percentage": 12.3, "elapsed_time": "0:05:55", "remaining_time": "0:42:15"}
|
39 |
+
{"current_steps": 39, "total_steps": 309, "loss": 0.7494, "lr": 9.979581007037776e-06, "epoch": 0.3759036144578313, "percentage": 12.62, "elapsed_time": "0:06:04", "remaining_time": "0:42:00"}
|
40 |
+
{"current_steps": 40, "total_steps": 309, "loss": 0.7733, "lr": 9.974161887713775e-06, "epoch": 0.3855421686746988, "percentage": 12.94, "elapsed_time": "0:06:14", "remaining_time": "0:41:56"}
|
41 |
+
{"current_steps": 41, "total_steps": 309, "loss": 0.8069, "lr": 9.968107546511942e-06, "epoch": 0.39518072289156625, "percentage": 13.27, "elapsed_time": "0:06:24", "remaining_time": "0:41:51"}
|
42 |
+
{"current_steps": 42, "total_steps": 309, "loss": 0.7544, "lr": 9.961418756597703e-06, "epoch": 0.40481927710843374, "percentage": 13.59, "elapsed_time": "0:06:33", "remaining_time": "0:41:39"}
|
43 |
+
{"current_steps": 43, "total_steps": 309, "loss": 0.8046, "lr": 9.95409637215831e-06, "epoch": 0.41445783132530123, "percentage": 13.92, "elapsed_time": "0:06:41", "remaining_time": "0:41:25"}
|
44 |
+
{"current_steps": 44, "total_steps": 309, "loss": 0.7887, "lr": 9.94614132829377e-06, "epoch": 0.42409638554216866, "percentage": 14.24, "elapsed_time": "0:06:52", "remaining_time": "0:41:24"}
|
45 |
+
{"current_steps": 45, "total_steps": 309, "loss": 0.806, "lr": 9.937554640897414e-06, "epoch": 0.43373493975903615, "percentage": 14.56, "elapsed_time": "0:07:00", "remaining_time": "0:41:09"}
|
46 |
+
{"current_steps": 46, "total_steps": 309, "loss": 0.8187, "lr": 9.928337406526172e-06, "epoch": 0.4433734939759036, "percentage": 14.89, "elapsed_time": "0:07:09", "remaining_time": "0:40:57"}
|
47 |
+
{"current_steps": 47, "total_steps": 309, "loss": 0.7442, "lr": 9.918490802260538e-06, "epoch": 0.4530120481927711, "percentage": 15.21, "elapsed_time": "0:07:19", "remaining_time": "0:40:49"}
|
48 |
+
{"current_steps": 48, "total_steps": 309, "loss": 0.7288, "lr": 9.90801608555425e-06, "epoch": 0.46265060240963857, "percentage": 15.53, "elapsed_time": "0:07:29", "remaining_time": "0:40:43"}
|
49 |
+
{"current_steps": 49, "total_steps": 309, "loss": 0.7876, "lr": 9.896914594073703e-06, "epoch": 0.472289156626506, "percentage": 15.86, "elapsed_time": "0:07:38", "remaining_time": "0:40:32"}
|
50 |
+
{"current_steps": 50, "total_steps": 309, "loss": 0.7958, "lr": 9.885187745527132e-06, "epoch": 0.4819277108433735, "percentage": 16.18, "elapsed_time": "0:07:49", "remaining_time": "0:40:32"}
|
51 |
+
{"current_steps": 51, "total_steps": 309, "loss": 0.7526, "lr": 9.87283703748356e-06, "epoch": 0.491566265060241, "percentage": 16.5, "elapsed_time": "0:07:59", "remaining_time": "0:40:25"}
|
52 |
+
{"current_steps": 52, "total_steps": 309, "loss": 0.771, "lr": 9.859864047181551e-06, "epoch": 0.5012048192771085, "percentage": 16.83, "elapsed_time": "0:08:07", "remaining_time": "0:40:07"}
|
53 |
+
{"current_steps": 53, "total_steps": 309, "loss": 0.7916, "lr": 9.846270431327793e-06, "epoch": 0.5108433734939759, "percentage": 17.15, "elapsed_time": "0:08:16", "remaining_time": "0:39:58"}
|
54 |
+
{"current_steps": 54, "total_steps": 309, "loss": 0.7394, "lr": 9.832057925885526e-06, "epoch": 0.5204819277108433, "percentage": 17.48, "elapsed_time": "0:08:26", "remaining_time": "0:39:50"}
|
55 |
+
{"current_steps": 55, "total_steps": 309, "loss": 0.7336, "lr": 9.817228345852853e-06, "epoch": 0.5301204819277109, "percentage": 17.8, "elapsed_time": "0:08:34", "remaining_time": "0:39:34"}
|
56 |
+
{"current_steps": 56, "total_steps": 309, "loss": 0.7373, "lr": 9.801783585030959e-06, "epoch": 0.5397590361445783, "percentage": 18.12, "elapsed_time": "0:08:44", "remaining_time": "0:39:29"}
|
57 |
+
{"current_steps": 57, "total_steps": 309, "loss": 0.7453, "lr": 9.785725615782262e-06, "epoch": 0.5493975903614458, "percentage": 18.45, "elapsed_time": "0:08:53", "remaining_time": "0:39:19"}
|
58 |
+
{"current_steps": 58, "total_steps": 309, "loss": 0.7468, "lr": 9.769056488778538e-06, "epoch": 0.5590361445783133, "percentage": 18.77, "elapsed_time": "0:09:02", "remaining_time": "0:39:05"}
|
59 |
+
{"current_steps": 59, "total_steps": 309, "loss": 0.6937, "lr": 9.751778332739033e-06, "epoch": 0.5686746987951807, "percentage": 19.09, "elapsed_time": "0:09:13", "remaining_time": "0:39:03"}
|
60 |
+
{"current_steps": 60, "total_steps": 309, "loss": 0.724, "lr": 9.733893354158628e-06, "epoch": 0.5783132530120482, "percentage": 19.42, "elapsed_time": "0:09:21", "remaining_time": "0:38:49"}
{"current_steps": 61, "total_steps": 309, "loss": 0.7357, "lr": 9.715403837026046e-06, "epoch": 0.5879518072289157, "percentage": 19.74, "elapsed_time": "0:09:30", "remaining_time": "0:38:38"}
{"current_steps": 62, "total_steps": 309, "loss": 0.7634, "lr": 9.69631214253219e-06, "epoch": 0.5975903614457831, "percentage": 20.06, "elapsed_time": "0:09:39", "remaining_time": "0:38:26"}
{"current_steps": 63, "total_steps": 309, "loss": 0.7507, "lr": 9.676620708768608e-06, "epoch": 0.6072289156626506, "percentage": 20.39, "elapsed_time": "0:09:48", "remaining_time": "0:38:18"}
{"current_steps": 64, "total_steps": 309, "loss": 0.7995, "lr": 9.656332050416118e-06, "epoch": 0.6168674698795181, "percentage": 20.71, "elapsed_time": "0:09:58", "remaining_time": "0:38:10"}
{"current_steps": 65, "total_steps": 309, "loss": 0.7284, "lr": 9.635448758423703e-06, "epoch": 0.6265060240963856, "percentage": 21.04, "elapsed_time": "0:10:06", "remaining_time": "0:37:57"}
{"current_steps": 66, "total_steps": 309, "loss": 0.7757, "lr": 9.613973499677613e-06, "epoch": 0.636144578313253, "percentage": 21.36, "elapsed_time": "0:10:15", "remaining_time": "0:37:46"}
{"current_steps": 67, "total_steps": 309, "loss": 0.7109, "lr": 9.591909016660806e-06, "epoch": 0.6457831325301204, "percentage": 21.68, "elapsed_time": "0:10:25", "remaining_time": "0:37:40"}
{"current_steps": 68, "total_steps": 309, "loss": 0.6956, "lr": 9.569258127102708e-06, "epoch": 0.655421686746988, "percentage": 22.01, "elapsed_time": "0:10:35", "remaining_time": "0:37:32"}
{"current_steps": 69, "total_steps": 309, "loss": 0.6946, "lr": 9.546023723619387e-06, "epoch": 0.6650602409638554, "percentage": 22.33, "elapsed_time": "0:10:45", "remaining_time": "0:37:23"}
{"current_steps": 70, "total_steps": 309, "loss": 0.6818, "lr": 9.522208773344147e-06, "epoch": 0.6746987951807228, "percentage": 22.65, "elapsed_time": "0:10:53", "remaining_time": "0:37:12"}
{"current_steps": 71, "total_steps": 309, "loss": 0.7371, "lr": 9.497816317548625e-06, "epoch": 0.6843373493975904, "percentage": 22.98, "elapsed_time": "0:11:02", "remaining_time": "0:37:01"}
{"current_steps": 72, "total_steps": 309, "loss": 0.7471, "lr": 9.472849471254386e-06, "epoch": 0.6939759036144578, "percentage": 23.3, "elapsed_time": "0:11:11", "remaining_time": "0:36:49"}
{"current_steps": 73, "total_steps": 309, "loss": 0.7603, "lr": 9.447311422835141e-06, "epoch": 0.7036144578313253, "percentage": 23.62, "elapsed_time": "0:11:20", "remaining_time": "0:36:39"}
{"current_steps": 74, "total_steps": 309, "loss": 0.6906, "lr": 9.421205433609568e-06, "epoch": 0.7132530120481928, "percentage": 23.95, "elapsed_time": "0:11:29", "remaining_time": "0:36:30"}
{"current_steps": 75, "total_steps": 309, "loss": 0.688, "lr": 9.39453483742483e-06, "epoch": 0.7228915662650602, "percentage": 24.27, "elapsed_time": "0:11:38", "remaining_time": "0:36:19"}
{"current_steps": 76, "total_steps": 309, "loss": 0.6933, "lr": 9.367303040230828e-06, "epoch": 0.7325301204819277, "percentage": 24.6, "elapsed_time": "0:11:47", "remaining_time": "0:36:08"}
{"current_steps": 77, "total_steps": 309, "loss": 0.7064, "lr": 9.33951351964525e-06, "epoch": 0.7421686746987952, "percentage": 24.92, "elapsed_time": "0:11:55", "remaining_time": "0:35:56"}
{"current_steps": 78, "total_steps": 309, "loss": 0.7174, "lr": 9.311169824509454e-06, "epoch": 0.7518072289156627, "percentage": 25.24, "elapsed_time": "0:12:05", "remaining_time": "0:35:49"}
{"current_steps": 79, "total_steps": 309, "loss": 0.7226, "lr": 9.28227557443528e-06, "epoch": 0.7614457831325301, "percentage": 25.57, "elapsed_time": "0:12:14", "remaining_time": "0:35:37"}
{"current_steps": 80, "total_steps": 309, "loss": 0.7238, "lr": 9.252834459342801e-06, "epoch": 0.7710843373493976, "percentage": 25.89, "elapsed_time": "0:12:24", "remaining_time": "0:35:30"}
{"current_steps": 81, "total_steps": 309, "loss": 0.706, "lr": 9.222850238989104e-06, "epoch": 0.7807228915662651, "percentage": 26.21, "elapsed_time": "0:12:32", "remaining_time": "0:35:19"}
{"current_steps": 82, "total_steps": 309, "loss": 0.7107, "lr": 9.192326742488153e-06, "epoch": 0.7903614457831325, "percentage": 26.54, "elapsed_time": "0:12:42", "remaining_time": "0:35:11"}
{"current_steps": 83, "total_steps": 309, "loss": 0.678, "lr": 9.161267867821802e-06, "epoch": 0.8, "percentage": 26.86, "elapsed_time": "0:12:51", "remaining_time": "0:34:59"}
{"current_steps": 84, "total_steps": 309, "loss": 0.7487, "lr": 9.129677581342e-06, "epoch": 0.8096385542168675, "percentage": 27.18, "elapsed_time": "0:12:58", "remaining_time": "0:34:46"}
{"current_steps": 85, "total_steps": 309, "loss": 0.6984, "lr": 9.097559917264268e-06, "epoch": 0.8192771084337349, "percentage": 27.51, "elapsed_time": "0:13:09", "remaining_time": "0:34:41"}
{"current_steps": 86, "total_steps": 309, "loss": 0.72, "lr": 9.064918977152517e-06, "epoch": 0.8289156626506025, "percentage": 27.83, "elapsed_time": "0:13:21", "remaining_time": "0:34:39"}
{"current_steps": 87, "total_steps": 309, "loss": 0.6858, "lr": 9.031758929395259e-06, "epoch": 0.8385542168674699, "percentage": 28.16, "elapsed_time": "0:13:32", "remaining_time": "0:34:32"}
{"current_steps": 88, "total_steps": 309, "loss": 0.7008, "lr": 8.998084008673284e-06, "epoch": 0.8481927710843373, "percentage": 28.48, "elapsed_time": "0:13:42", "remaining_time": "0:34:24"}
{"current_steps": 89, "total_steps": 309, "loss": 0.7093, "lr": 8.963898515418885e-06, "epoch": 0.8578313253012049, "percentage": 28.8, "elapsed_time": "0:13:50", "remaining_time": "0:34:14"}
{"current_steps": 90, "total_steps": 309, "loss": 0.6888, "lr": 8.929206815266653e-06, "epoch": 0.8674698795180723, "percentage": 29.13, "elapsed_time": "0:14:00", "remaining_time": "0:34:04"}
{"current_steps": 91, "total_steps": 309, "loss": 0.6948, "lr": 8.89401333849598e-06, "epoch": 0.8771084337349397, "percentage": 29.45, "elapsed_time": "0:14:08", "remaining_time": "0:33:53"}
{"current_steps": 92, "total_steps": 309, "loss": 0.7512, "lr": 8.8583225794653e-06, "epoch": 0.8867469879518072, "percentage": 29.77, "elapsed_time": "0:14:16", "remaining_time": "0:33:40"}
{"current_steps": 93, "total_steps": 309, "loss": 0.6964, "lr": 8.82213909603812e-06, "epoch": 0.8963855421686747, "percentage": 30.1, "elapsed_time": "0:14:25", "remaining_time": "0:33:29"}
{"current_steps": 94, "total_steps": 309, "loss": 0.7281, "lr": 8.78546750900098e-06, "epoch": 0.9060240963855422, "percentage": 30.42, "elapsed_time": "0:14:34", "remaining_time": "0:33:19"}
{"current_steps": 95, "total_steps": 309, "loss": 0.6991, "lr": 8.748312501473351e-06, "epoch": 0.9156626506024096, "percentage": 30.74, "elapsed_time": "0:14:43", "remaining_time": "0:33:09"}
{"current_steps": 96, "total_steps": 309, "loss": 0.6241, "lr": 8.710678818309576e-06, "epoch": 0.9253012048192771, "percentage": 31.07, "elapsed_time": "0:14:52", "remaining_time": "0:33:00"}
{"current_steps": 97, "total_steps": 309, "loss": 0.6176, "lr": 8.672571265492944e-06, "epoch": 0.9349397590361446, "percentage": 31.39, "elapsed_time": "0:15:03", "remaining_time": "0:32:53"}
{"current_steps": 98, "total_steps": 309, "loss": 0.672, "lr": 8.63399470952193e-06, "epoch": 0.944578313253012, "percentage": 31.72, "elapsed_time": "0:15:12", "remaining_time": "0:32:45"}
{"current_steps": 99, "total_steps": 309, "loss": 0.7228, "lr": 8.594954076788736e-06, "epoch": 0.9542168674698795, "percentage": 32.04, "elapsed_time": "0:15:21", "remaining_time": "0:32:33"}
{"current_steps": 100, "total_steps": 309, "loss": 0.7101, "lr": 8.555454352950161e-06, "epoch": 0.963855421686747, "percentage": 32.36, "elapsed_time": "0:15:29", "remaining_time": "0:32:22"}
{"current_steps": 101, "total_steps": 309, "loss": 0.6865, "lr": 8.515500582290914e-06, "epoch": 0.9734939759036144, "percentage": 32.69, "elapsed_time": "0:15:37", "remaining_time": "0:32:10"}
{"current_steps": 102, "total_steps": 309, "loss": 0.6994, "lr": 8.475097867079437e-06, "epoch": 0.983132530120482, "percentage": 33.01, "elapsed_time": "0:15:45", "remaining_time": "0:31:58"}
{"current_steps": 103, "total_steps": 309, "loss": 0.7047, "lr": 8.434251366916323e-06, "epoch": 0.9927710843373494, "percentage": 33.33, "elapsed_time": "0:15:56", "remaining_time": "0:31:52"}
{"current_steps": 104, "total_steps": 309, "loss": 0.6859, "lr": 8.392966298075413e-06, "epoch": 1.0, "percentage": 33.66, "elapsed_time": "0:16:03", "remaining_time": "0:31:39"}
{"current_steps": 105, "total_steps": 309, "loss": 0.6524, "lr": 8.351247932837655e-06, "epoch": 1.0096385542168675, "percentage": 33.98, "elapsed_time": "0:16:14", "remaining_time": "0:31:32"}
{"current_steps": 106, "total_steps": 309, "loss": 0.6007, "lr": 8.309101598817812e-06, "epoch": 1.0192771084337349, "percentage": 34.3, "elapsed_time": "0:16:23", "remaining_time": "0:31:23"}
{"current_steps": 107, "total_steps": 309, "loss": 0.6718, "lr": 8.266532678284103e-06, "epoch": 1.0289156626506024, "percentage": 34.63, "elapsed_time": "0:16:32", "remaining_time": "0:31:12"}
{"current_steps": 108, "total_steps": 309, "loss": 0.62, "lr": 8.223546607470863e-06, "epoch": 1.03855421686747, "percentage": 34.95, "elapsed_time": "0:16:39", "remaining_time": "0:31:00"}
{"current_steps": 109, "total_steps": 309, "loss": 0.5977, "lr": 8.18014887588431e-06, "epoch": 1.0481927710843373, "percentage": 35.28, "elapsed_time": "0:16:47", "remaining_time": "0:30:49"}
{"current_steps": 110, "total_steps": 309, "loss": 0.624, "lr": 8.13634502560152e-06, "epoch": 1.0578313253012048, "percentage": 35.6, "elapsed_time": "0:16:56", "remaining_time": "0:30:38"}
{"current_steps": 111, "total_steps": 309, "loss": 0.6297, "lr": 8.092140650562665e-06, "epoch": 1.0674698795180724, "percentage": 35.92, "elapsed_time": "0:17:06", "remaining_time": "0:30:30"}
{"current_steps": 112, "total_steps": 309, "loss": 0.628, "lr": 8.047541395856661e-06, "epoch": 1.0771084337349397, "percentage": 36.25, "elapsed_time": "0:17:15", "remaining_time": "0:30:21"}
{"current_steps": 113, "total_steps": 309, "loss": 0.6064, "lr": 8.002552957000254e-06, "epoch": 1.0867469879518072, "percentage": 36.57, "elapsed_time": "0:17:24", "remaining_time": "0:30:11"}
{"current_steps": 114, "total_steps": 309, "loss": 0.6034, "lr": 7.957181079210676e-06, "epoch": 1.0963855421686748, "percentage": 36.89, "elapsed_time": "0:17:34", "remaining_time": "0:30:03"}
{"current_steps": 115, "total_steps": 309, "loss": 0.5827, "lr": 7.911431556671967e-06, "epoch": 1.106024096385542, "percentage": 37.22, "elapsed_time": "0:17:43", "remaining_time": "0:29:54"}
{"current_steps": 116, "total_steps": 309, "loss": 0.6235, "lr": 7.865310231795026e-06, "epoch": 1.1156626506024097, "percentage": 37.54, "elapsed_time": "0:17:52", "remaining_time": "0:29:43"}
{"current_steps": 117, "total_steps": 309, "loss": 0.6082, "lr": 7.818822994471504e-06, "epoch": 1.1253012048192772, "percentage": 37.86, "elapsed_time": "0:18:00", "remaining_time": "0:29:33"}
{"current_steps": 118, "total_steps": 309, "loss": 0.6335, "lr": 7.771975781321655e-06, "epoch": 1.1349397590361445, "percentage": 38.19, "elapsed_time": "0:18:09", "remaining_time": "0:29:23"}
{"current_steps": 119, "total_steps": 309, "loss": 0.6208, "lr": 7.72477457493619e-06, "epoch": 1.144578313253012, "percentage": 38.51, "elapsed_time": "0:18:18", "remaining_time": "0:29:13"}
{"current_steps": 120, "total_steps": 309, "loss": 0.6314, "lr": 7.677225403112277e-06, "epoch": 1.1542168674698796, "percentage": 38.83, "elapsed_time": "0:18:27", "remaining_time": "0:29:04"}
{"current_steps": 121, "total_steps": 309, "loss": 0.618, "lr": 7.629334338083774e-06, "epoch": 1.163855421686747, "percentage": 39.16, "elapsed_time": "0:18:37", "remaining_time": "0:28:56"}
{"current_steps": 122, "total_steps": 309, "loss": 0.6127, "lr": 7.58110749574577e-06, "epoch": 1.1734939759036145, "percentage": 39.48, "elapsed_time": "0:18:46", "remaining_time": "0:28:46"}
{"current_steps": 123, "total_steps": 309, "loss": 0.6414, "lr": 7.532551034873558e-06, "epoch": 1.1831325301204818, "percentage": 39.81, "elapsed_time": "0:18:56", "remaining_time": "0:28:37"}
{"current_steps": 124, "total_steps": 309, "loss": 0.5967, "lr": 7.483671156336142e-06, "epoch": 1.1927710843373494, "percentage": 40.13, "elapsed_time": "0:19:03", "remaining_time": "0:28:26"}
{"current_steps": 125, "total_steps": 309, "loss": 0.6185, "lr": 7.43447410230435e-06, "epoch": 1.202409638554217, "percentage": 40.45, "elapsed_time": "0:19:11", "remaining_time": "0:28:15"}
{"current_steps": 126, "total_steps": 309, "loss": 0.6412, "lr": 7.384966155453686e-06, "epoch": 1.2120481927710842, "percentage": 40.78, "elapsed_time": "0:19:20", "remaining_time": "0:28:05"}
{"current_steps": 127, "total_steps": 309, "loss": 0.6173, "lr": 7.335153638162005e-06, "epoch": 1.2216867469879518, "percentage": 41.1, "elapsed_time": "0:19:31", "remaining_time": "0:27:59"}
{"current_steps": 128, "total_steps": 309, "loss": 0.5867, "lr": 7.285042911702116e-06, "epoch": 1.2313253012048193, "percentage": 41.42, "elapsed_time": "0:19:42", "remaining_time": "0:27:51"}
{"current_steps": 129, "total_steps": 309, "loss": 0.617, "lr": 7.234640375429427e-06, "epoch": 1.2409638554216866, "percentage": 41.75, "elapsed_time": "0:19:51", "remaining_time": "0:27:42"}
{"current_steps": 130, "total_steps": 309, "loss": 0.6193, "lr": 7.183952465964711e-06, "epoch": 1.2506024096385542, "percentage": 42.07, "elapsed_time": "0:19:59", "remaining_time": "0:27:31"}
{"current_steps": 131, "total_steps": 309, "loss": 0.6423, "lr": 7.132985656372126e-06, "epoch": 1.2602409638554217, "percentage": 42.39, "elapsed_time": "0:20:08", "remaining_time": "0:27:22"}
{"current_steps": 132, "total_steps": 309, "loss": 0.5984, "lr": 7.0817464553325764e-06, "epoch": 1.269879518072289, "percentage": 42.72, "elapsed_time": "0:20:18", "remaining_time": "0:27:13"}
{"current_steps": 133, "total_steps": 309, "loss": 0.6179, "lr": 7.030241406312528e-06, "epoch": 1.2795180722891566, "percentage": 43.04, "elapsed_time": "0:20:25", "remaining_time": "0:27:02"}
{"current_steps": 134, "total_steps": 309, "loss": 0.6758, "lr": 6.978477086728375e-06, "epoch": 1.2891566265060241, "percentage": 43.37, "elapsed_time": "0:20:35", "remaining_time": "0:26:53"}
{"current_steps": 135, "total_steps": 309, "loss": 0.6083, "lr": 6.926460107106483e-06, "epoch": 1.2987951807228915, "percentage": 43.69, "elapsed_time": "0:20:46", "remaining_time": "0:26:46"}
{"current_steps": 136, "total_steps": 309, "loss": 0.621, "lr": 6.874197110238986e-06, "epoch": 1.308433734939759, "percentage": 44.01, "elapsed_time": "0:20:54", "remaining_time": "0:26:35"}
{"current_steps": 137, "total_steps": 309, "loss": 0.602, "lr": 6.8216947703354815e-06, "epoch": 1.3180722891566266, "percentage": 44.34, "elapsed_time": "0:21:04", "remaining_time": "0:26:27"}
{"current_steps": 138, "total_steps": 309, "loss": 0.6078, "lr": 6.7689597921707065e-06, "epoch": 1.3277108433734939, "percentage": 44.66, "elapsed_time": "0:21:13", "remaining_time": "0:26:18"}
{"current_steps": 139, "total_steps": 309, "loss": 0.6012, "lr": 6.715998910228296e-06, "epoch": 1.3373493975903614, "percentage": 44.98, "elapsed_time": "0:21:23", "remaining_time": "0:26:10"}
{"current_steps": 140, "total_steps": 309, "loss": 0.5436, "lr": 6.6628188878407806e-06, "epoch": 1.346987951807229, "percentage": 45.31, "elapsed_time": "0:21:33", "remaining_time": "0:26:01"}
{"current_steps": 141, "total_steps": 309, "loss": 0.6328, "lr": 6.609426516325859e-06, "epoch": 1.3566265060240963, "percentage": 45.63, "elapsed_time": "0:21:43", "remaining_time": "0:25:53"}
{"current_steps": 142, "total_steps": 309, "loss": 0.6157, "lr": 6.555828614119132e-06, "epoch": 1.3662650602409638, "percentage": 45.95, "elapsed_time": "0:21:53", "remaining_time": "0:25:44"}
{"current_steps": 143, "total_steps": 309, "loss": 0.668, "lr": 6.502032025903356e-06, "epoch": 1.3759036144578314, "percentage": 46.28, "elapsed_time": "0:22:02", "remaining_time": "0:25:35"}
{"current_steps": 144, "total_steps": 309, "loss": 0.6411, "lr": 6.4480436217343366e-06, "epoch": 1.3855421686746987, "percentage": 46.6, "elapsed_time": "0:22:13", "remaining_time": "0:25:28"}
{"current_steps": 145, "total_steps": 309, "loss": 0.6235, "lr": 6.393870296163616e-06, "epoch": 1.3951807228915662, "percentage": 46.93, "elapsed_time": "0:22:23", "remaining_time": "0:25:19"}
{"current_steps": 146, "total_steps": 309, "loss": 0.6281, "lr": 6.339518967357985e-06, "epoch": 1.4048192771084338, "percentage": 47.25, "elapsed_time": "0:22:33", "remaining_time": "0:25:10"}
{"current_steps": 147, "total_steps": 309, "loss": 0.5896, "lr": 6.284996576216014e-06, "epoch": 1.4144578313253011, "percentage": 47.57, "elapsed_time": "0:22:44", "remaining_time": "0:25:04"}
{"current_steps": 148, "total_steps": 309, "loss": 0.6225, "lr": 6.230310085481677e-06, "epoch": 1.4240963855421687, "percentage": 47.9, "elapsed_time": "0:22:52", "remaining_time": "0:24:52"}
{"current_steps": 149, "total_steps": 309, "loss": 0.6683, "lr": 6.175466478855161e-06, "epoch": 1.4337349397590362, "percentage": 48.22, "elapsed_time": "0:23:02", "remaining_time": "0:24:44"}
{"current_steps": 150, "total_steps": 309, "loss": 0.665, "lr": 6.1204727601010396e-06, "epoch": 1.4433734939759035, "percentage": 48.54, "elapsed_time": "0:23:11", "remaining_time": "0:24:35"}
{"current_steps": 151, "total_steps": 309, "loss": 0.6313, "lr": 6.065335952153846e-06, "epoch": 1.453012048192771, "percentage": 48.87, "elapsed_time": "0:23:20", "remaining_time": "0:24:25"}
{"current_steps": 152, "total_steps": 309, "loss": 0.6139, "lr": 6.010063096221215e-06, "epoch": 1.4626506024096386, "percentage": 49.19, "elapsed_time": "0:23:29", "remaining_time": "0:24:15"}
{"current_steps": 153, "total_steps": 309, "loss": 0.6263, "lr": 5.954661250884704e-06, "epoch": 1.472289156626506, "percentage": 49.51, "elapsed_time": "0:23:38", "remaining_time": "0:24:06"}
{"current_steps": 154, "total_steps": 309, "loss": 0.6635, "lr": 5.899137491198364e-06, "epoch": 1.4819277108433735, "percentage": 49.84, "elapsed_time": "0:23:46", "remaining_time": "0:23:56"}
{"current_steps": 155, "total_steps": 309, "loss": 0.5506, "lr": 5.843498907785236e-06, "epoch": 1.491566265060241, "percentage": 50.16, "elapsed_time": "0:23:55", "remaining_time": "0:23:46"}
{"current_steps": 156, "total_steps": 309, "loss": 0.6193, "lr": 5.78775260593185e-06, "epoch": 1.5012048192771084, "percentage": 50.49, "elapsed_time": "0:24:03", "remaining_time": "0:23:35"}
{"current_steps": 157, "total_steps": 309, "loss": 0.6251, "lr": 5.731905704680834e-06, "epoch": 1.510843373493976, "percentage": 50.81, "elapsed_time": "0:24:13", "remaining_time": "0:23:27"}
{"current_steps": 158, "total_steps": 309, "loss": 0.6761, "lr": 5.6759653359218e-06, "epoch": 1.5204819277108435, "percentage": 51.13, "elapsed_time": "0:24:24", "remaining_time": "0:23:19"}
{"current_steps": 159, "total_steps": 309, "loss": 0.5842, "lr": 5.6199386434805615e-06, "epoch": 1.5301204819277108, "percentage": 51.46, "elapsed_time": "0:24:33", "remaining_time": "0:23:10"}
{"current_steps": 160, "total_steps": 309, "loss": 0.6087, "lr": 5.563832782206835e-06, "epoch": 1.5397590361445783, "percentage": 51.78, "elapsed_time": "0:24:45", "remaining_time": "0:23:03"}
{"current_steps": 161, "total_steps": 309, "loss": 0.6059, "lr": 5.507654917060541e-06, "epoch": 1.5493975903614459, "percentage": 52.1, "elapsed_time": "0:24:55", "remaining_time": "0:22:54"}
{"current_steps": 162, "total_steps": 309, "loss": 0.5965, "lr": 5.451412222196801e-06, "epoch": 1.5590361445783132, "percentage": 52.43, "elapsed_time": "0:25:04", "remaining_time": "0:22:45"}
{"current_steps": 163, "total_steps": 309, "loss": 0.5697, "lr": 5.395111880049775e-06, "epoch": 1.5686746987951807, "percentage": 52.75, "elapsed_time": "0:25:12", "remaining_time": "0:22:35"}
{"current_steps": 164, "total_steps": 309, "loss": 0.6026, "lr": 5.338761080415425e-06, "epoch": 1.5783132530120483, "percentage": 53.07, "elapsed_time": "0:25:22", "remaining_time": "0:22:26"}
{"current_steps": 165, "total_steps": 309, "loss": 0.5812, "lr": 5.28236701953335e-06, "epoch": 1.5879518072289156, "percentage": 53.4, "elapsed_time": "0:25:32", "remaining_time": "0:22:17"}
{"current_steps": 166, "total_steps": 309, "loss": 0.6347, "lr": 5.225936899167803e-06, "epoch": 1.5975903614457831, "percentage": 53.72, "elapsed_time": "0:25:41", "remaining_time": "0:22:07"}
{"current_steps": 167, "total_steps": 309, "loss": 0.61, "lr": 5.169477925687981e-06, "epoch": 1.6072289156626507, "percentage": 54.05, "elapsed_time": "0:25:51", "remaining_time": "0:21:59"}
{"current_steps": 168, "total_steps": 309, "loss": 0.6102, "lr": 5.112997309147753e-06, "epoch": 1.616867469879518, "percentage": 54.37, "elapsed_time": "0:26:00", "remaining_time": "0:21:49"}
{"current_steps": 169, "total_steps": 309, "loss": 0.6634, "lr": 5.0565022623649e-06, "epoch": 1.6265060240963856, "percentage": 54.69, "elapsed_time": "0:26:10", "remaining_time": "0:21:40"}
{"current_steps": 170, "total_steps": 309, "loss": 0.6221, "lr": 5e-06, "epoch": 1.636144578313253, "percentage": 55.02, "elapsed_time": "0:26:21", "remaining_time": "0:21:33"}
{"current_steps": 171, "total_steps": 309, "loss": 0.576, "lr": 4.943497737635103e-06, "epoch": 1.6457831325301204, "percentage": 55.34, "elapsed_time": "0:26:29", "remaining_time": "0:21:22"}
{"current_steps": 172, "total_steps": 309, "loss": 0.5677, "lr": 4.887002690852249e-06, "epoch": 1.655421686746988, "percentage": 55.66, "elapsed_time": "0:26:38", "remaining_time": "0:21:13"}
{"current_steps": 173, "total_steps": 309, "loss": 0.6066, "lr": 4.830522074312019e-06, "epoch": 1.6650602409638555, "percentage": 55.99, "elapsed_time": "0:26:48", "remaining_time": "0:21:04"}
{"current_steps": 174, "total_steps": 309, "loss": 0.6569, "lr": 4.774063100832199e-06, "epoch": 1.6746987951807228, "percentage": 56.31, "elapsed_time": "0:26:55", "remaining_time": "0:20:53"}
{"current_steps": 175, "total_steps": 309, "loss": 0.6171, "lr": 4.717632980466652e-06, "epoch": 1.6843373493975904, "percentage": 56.63, "elapsed_time": "0:27:05", "remaining_time": "0:20:44"}
{"current_steps": 176, "total_steps": 309, "loss": 0.6257, "lr": 4.661238919584578e-06, "epoch": 1.693975903614458, "percentage": 56.96, "elapsed_time": "0:27:15", "remaining_time": "0:20:35"}
{"current_steps": 177, "total_steps": 309, "loss": 0.5744, "lr": 4.6048881199502265e-06, "epoch": 1.7036144578313253, "percentage": 57.28, "elapsed_time": "0:27:25", "remaining_time": "0:20:26"}
{"current_steps": 178, "total_steps": 309, "loss": 0.6479, "lr": 4.548587777803198e-06, "epoch": 1.7132530120481928, "percentage": 57.61, "elapsed_time": "0:27:34", "remaining_time": "0:20:17"}
{"current_steps": 179, "total_steps": 309, "loss": 0.5905, "lr": 4.49234508293946e-06, "epoch": 1.7228915662650603, "percentage": 57.93, "elapsed_time": "0:27:43", "remaining_time": "0:20:08"}
{"current_steps": 180, "total_steps": 309, "loss": 0.6377, "lr": 4.436167217793167e-06, "epoch": 1.7325301204819277, "percentage": 58.25, "elapsed_time": "0:27:53", "remaining_time": "0:19:59"}
{"current_steps": 181, "total_steps": 309, "loss": 0.6343, "lr": 4.38006135651944e-06, "epoch": 1.7421686746987952, "percentage": 58.58, "elapsed_time": "0:28:01", "remaining_time": "0:19:49"}
{"current_steps": 182, "total_steps": 309, "loss": 0.604, "lr": 4.3240346640782014e-06, "epoch": 1.7518072289156628, "percentage": 58.9, "elapsed_time": "0:28:09", "remaining_time": "0:19:39"}
{"current_steps": 183, "total_steps": 309, "loss": 0.5884, "lr": 4.268094295319167e-06, "epoch": 1.76144578313253, "percentage": 59.22, "elapsed_time": "0:28:19", "remaining_time": "0:19:30"}
{"current_steps": 184, "total_steps": 309, "loss": 0.5786, "lr": 4.212247394068151e-06, "epoch": 1.7710843373493976, "percentage": 59.55, "elapsed_time": "0:28:29", "remaining_time": "0:19:21"}
{"current_steps": 185, "total_steps": 309, "loss": 0.568, "lr": 4.1565010922147644e-06, "epoch": 1.7807228915662652, "percentage": 59.87, "elapsed_time": "0:28:40", "remaining_time": "0:19:12"}
{"current_steps": 186, "total_steps": 309, "loss": 0.6216, "lr": 4.100862508801639e-06, "epoch": 1.7903614457831325, "percentage": 60.19, "elapsed_time": "0:28:49", "remaining_time": "0:19:03"}
{"current_steps": 187, "total_steps": 309, "loss": 0.58, "lr": 4.045338749115299e-06, "epoch": 1.8, "percentage": 60.52, "elapsed_time": "0:28:58", "remaining_time": "0:18:54"}
{"current_steps": 188, "total_steps": 309, "loss": 0.6225, "lr": 3.989936903778785e-06, "epoch": 1.8096385542168676, "percentage": 60.84, "elapsed_time": "0:29:06", "remaining_time": "0:18:44"}
{"current_steps": 189, "total_steps": 309, "loss": 0.5685, "lr": 3.934664047846157e-06, "epoch": 1.819277108433735, "percentage": 61.17, "elapsed_time": "0:29:15", "remaining_time": "0:18:34"}
{"current_steps": 190, "total_steps": 309, "loss": 0.5875, "lr": 3.879527239898962e-06, "epoch": 1.8289156626506025, "percentage": 61.49, "elapsed_time": "0:29:24", "remaining_time": "0:18:25"}
{"current_steps": 191, "total_steps": 309, "loss": 0.5635, "lr": 3.8245335211448404e-06, "epoch": 1.83855421686747, "percentage": 61.81, "elapsed_time": "0:29:34", "remaining_time": "0:18:16"}
{"current_steps": 192, "total_steps": 309, "loss": 0.6155, "lr": 3.769689914518326e-06, "epoch": 1.8481927710843373, "percentage": 62.14, "elapsed_time": "0:29:43", "remaining_time": "0:18:06"}
{"current_steps": 193, "total_steps": 309, "loss": 0.624, "lr": 3.715003423783986e-06, "epoch": 1.8578313253012049, "percentage": 62.46, "elapsed_time": "0:29:53", "remaining_time": "0:17:57"}
{"current_steps": 194, "total_steps": 309, "loss": 0.6506, "lr": 3.660481032642016e-06, "epoch": 1.8674698795180724, "percentage": 62.78, "elapsed_time": "0:30:02", "remaining_time": "0:17:48"}
{"current_steps": 195, "total_steps": 309, "loss": 0.602, "lr": 3.6061297038363853e-06, "epoch": 1.8771084337349397, "percentage": 63.11, "elapsed_time": "0:30:10", "remaining_time": "0:17:38"}
{"current_steps": 196, "total_steps": 309, "loss": 0.6721, "lr": 3.5519563782656642e-06, "epoch": 1.886746987951807, "percentage": 63.43, "elapsed_time": "0:30:19", "remaining_time": "0:17:29"}
{"current_steps": 197, "total_steps": 309, "loss": 0.6107, "lr": 3.497967974096647e-06, "epoch": 1.8963855421686748, "percentage": 63.75, "elapsed_time": "0:30:27", "remaining_time": "0:17:19"}
{"current_steps": 198, "total_steps": 309, "loss": 0.582, "lr": 3.4441713858808684e-06, "epoch": 1.9060240963855422, "percentage": 64.08, "elapsed_time": "0:30:35", "remaining_time": "0:17:09"}
{"current_steps": 199, "total_steps": 309, "loss": 0.5896, "lr": 3.3905734836741415e-06, "epoch": 1.9156626506024095, "percentage": 64.4, "elapsed_time": "0:30:45", "remaining_time": "0:17:00"}
{"current_steps": 200, "total_steps": 309, "loss": 0.551, "lr": 3.3371811121592203e-06, "epoch": 1.9253012048192772, "percentage": 64.72, "elapsed_time": "0:30:54", "remaining_time": "0:16:50"}
{"current_steps": 201, "total_steps": 309, "loss": 0.603, "lr": 3.2840010897717045e-06, "epoch": 1.9349397590361446, "percentage": 65.05, "elapsed_time": "0:32:14", "remaining_time": "0:17:19"}
{"current_steps": 202, "total_steps": 309, "loss": 0.6072, "lr": 3.2310402078292956e-06, "epoch": 1.944578313253012, "percentage": 65.37, "elapsed_time": "0:32:24", "remaining_time": "0:17:09"}
{"current_steps": 203, "total_steps": 309, "loss": 0.5848, "lr": 3.178305229664519e-06, "epoch": 1.9542168674698797, "percentage": 65.7, "elapsed_time": "0:32:34", "remaining_time": "0:17:00"}
{"current_steps": 204, "total_steps": 309, "loss": 0.6262, "lr": 3.125802889761016e-06, "epoch": 1.963855421686747, "percentage": 66.02, "elapsed_time": "0:32:43", "remaining_time": "0:16:50"}
{"current_steps": 205, "total_steps": 309, "loss": 0.5991, "lr": 3.073539892893519e-06, "epoch": 1.9734939759036143, "percentage": 66.34, "elapsed_time": "0:32:53", "remaining_time": "0:16:41"}
{"current_steps": 206, "total_steps": 309, "loss": 0.601, "lr": 3.021522913271627e-06, "epoch": 1.983132530120482, "percentage": 66.67, "elapsed_time": "0:33:03", "remaining_time": "0:16:31"}
{"current_steps": 207, "total_steps": 309, "loss": 0.5751, "lr": 2.969758593687475e-06, "epoch": 1.9927710843373494, "percentage": 66.99, "elapsed_time": "0:33:12", "remaining_time": "0:16:21"}
{"current_steps": 208, "total_steps": 309, "loss": 0.5888, "lr": 2.9182535446674244e-06, "epoch": 2.0, "percentage": 67.31, "elapsed_time": "0:33:18", "remaining_time": "0:16:10"}
{"current_steps": 209, "total_steps": 309, "loss": 0.5468, "lr": 2.8670143436278757e-06, "epoch": 2.0096385542168673, "percentage": 67.64, "elapsed_time": "0:33:28", "remaining_time": "0:16:00"}
{"current_steps": 210, "total_steps": 309, "loss": 0.5644, "lr": 2.8160475340352913e-06, "epoch": 2.019277108433735, "percentage": 67.96, "elapsed_time": "0:33:36", "remaining_time": "0:15:50"}
{"current_steps": 211, "total_steps": 309, "loss": 0.5886, "lr": 2.765359624570574e-06, "epoch": 2.0289156626506024, "percentage": 68.28, "elapsed_time": "0:33:46", "remaining_time": "0:15:41"}
{"current_steps": 212, "total_steps": 309, "loss": 0.527, "lr": 2.714957088297886e-06, "epoch": 2.0385542168674697, "percentage": 68.61, "elapsed_time": "0:33:56", "remaining_time": "0:15:31"}
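The log above covers steps 43–212 of 309 (epoch ≈ 0.41 through ≈ 2.04). For readers who want to inspect the curve, here is a minimal parsing sketch; it assumes only the JSONL schema visible above and a local copy of trainer_log.jsonl:

```python
import json

# Read trainer_log.jsonl (one JSON object per logging step, schema as above)
# and print a compact per-step summary of loss and learning rate.
with open("trainer_log.jsonl") as f:
    records = [json.loads(line) for line in f if line.strip()]

for r in records:
    print(f"step {r['current_steps']:>3}/{r['total_steps']}  "
          f"epoch {r['epoch']:.2f}  loss {r['loss']:.4f}  lr {r['lr']:.3e}")
```

The logged learning rates are consistent with cosine decay from a peak of 1e-5 after a short linear warmup (note that lr is exactly 5e-06 at step 170, the midpoint of the post-warmup span). A hypothetical reconstruction under those assumptions — peak lr, warmup length, and schedule shape are inferred from the log, not confirmed by this commit:

```python
import math

PEAK_LR, TOTAL_STEPS, WARMUP_STEPS = 1e-5, 309, 31  # assumed hyperparameters

def lr_at(step: int) -> float:
    # Linear warmup, then cosine decay to zero (assumed schedule).
    if step < WARMUP_STEPS:
        return PEAK_LR * step / WARMUP_STEPS
    progress = (step - WARMUP_STEPS) / (TOTAL_STEPS - WARMUP_STEPS)
    return 0.5 * PEAK_LR * (1.0 + math.cos(math.pi * progress))

print(lr_at(43))   # ≈ 9.954e-06, cf. the step-43 entry above
print(lr_at(170))  # = 5.000e-06, cf. the step-170 entry above
```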
training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6594147ffe8cbc8c34d426d74aec0ee7c842458c25bd805019acec2bb6d9cb7c
size 7864
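The three lines above form a Git LFS pointer (spec v1): `oid` is the SHA-256 of the actual file contents and `size` is its length in bytes. A minimal sketch for verifying a downloaded copy of training_args.bin against this pointer (the local filename is an assumption):

```python
import hashlib

# Expected values, copied from the LFS pointer above.
EXPECTED_OID = "6594147ffe8cbc8c34d426d74aec0ee7c842458c25bd805019acec2bb6d9cb7c"
EXPECTED_SIZE = 7864  # bytes

with open("training_args.bin", "rb") as f:  # assumed local path
    data = f.read()

assert len(data) == EXPECTED_SIZE, f"size mismatch: {len(data)} bytes"
assert hashlib.sha256(data).hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("training_args.bin matches its LFS pointer")
```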