Delete checkpoint-1000
- checkpoint-1000/config.json +0 -28
- checkpoint-1000/generation_config.json +0 -7
- checkpoint-1000/model-00001-of-00003.safetensors +0 -3
- checkpoint-1000/model-00002-of-00003.safetensors +0 -3
- checkpoint-1000/model-00003-of-00003.safetensors +0 -3
- checkpoint-1000/model.safetensors.index.json +0 -171
- checkpoint-1000/optimizer.pt +0 -3
- checkpoint-1000/rng_state_0.pth +0 -3
- checkpoint-1000/rng_state_1.pth +0 -3
- checkpoint-1000/rng_state_2.pth +0 -3
- checkpoint-1000/rng_state_3.pth +0 -3
- checkpoint-1000/scheduler.pt +0 -3
- checkpoint-1000/special_tokens_map.json +0 -30
- checkpoint-1000/tokenizer.model +0 -3
- checkpoint-1000/tokenizer_config.json +0 -51
- checkpoint-1000/trainer_state.json +0 -721
- checkpoint-1000/training_args.bin +0 -3
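This commit removes the entire checkpoint-1000/ folder in a single operation. As a minimal sketch (not necessarily how this commit was made), a deletion like this can be produced with the huggingface_hub client; the repo id below is a placeholder:

```python
# Sketch: delete a checkpoint folder from a Hub repo in one commit.
# "your-username/your-model" is a hypothetical repo id, not this repository.
from huggingface_hub import HfApi, CommitOperationDelete

api = HfApi()  # assumes you are authenticated with write access
api.create_commit(
    repo_id="your-username/your-model",
    operations=[CommitOperationDelete(path_in_repo="checkpoint-1000/", is_folder=True)],
    commit_message="Delete checkpoint-1000",
)
```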
checkpoint-1000/config.json
DELETED
@@ -1,28 +0,0 @@
-{
-  "_name_or_path": "google/gemma-2b",
-  "architectures": [
-    "GemmaForCausalLM"
-  ],
-  "attention_bias": false,
-  "attention_dropout": 0.0,
-  "bos_token_id": 2,
-  "eos_token_id": 1,
-  "head_dim": 256,
-  "hidden_act": "gelu",
-  "hidden_size": 2048,
-  "initializer_range": 0.02,
-  "intermediate_size": 16384,
-  "max_position_embeddings": 8192,
-  "model_type": "gemma",
-  "num_attention_heads": 8,
-  "num_hidden_layers": 18,
-  "num_key_value_heads": 1,
-  "pad_token_id": 0,
-  "rms_norm_eps": 1e-06,
-  "rope_scaling": null,
-  "rope_theta": 10000.0,
-  "torch_dtype": "float32",
-  "transformers_version": "4.39.0.dev0",
-  "use_cache": false,
-  "vocab_size": 256000
-}
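The deleted config.json pins the checkpoint to the google/gemma-2b architecture (18 layers, hidden size 2048, 256k vocab), with use_cache disabled for training. A small sketch, assuming transformers is installed and the checkpoint folder still exists locally, of checking a checkpoint config against its declared _name_or_path base:

```python
# Sketch: compare a local checkpoint's config with its base model's config.
from transformers import AutoConfig

ckpt = AutoConfig.from_pretrained("checkpoint-1000")   # local checkpoint folder
base = AutoConfig.from_pretrained(ckpt._name_or_path)  # "google/gemma-2b"
for key in ("hidden_size", "num_hidden_layers", "num_attention_heads", "vocab_size"):
    assert getattr(ckpt, key) == getattr(base, key), key
# Training-time overrides such as use_cache=False may still differ from the base.
```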
checkpoint-1000/generation_config.json
DELETED
@@ -1,7 +0,0 @@
-{
-  "_from_model_config": true,
-  "bos_token_id": 2,
-  "eos_token_id": 1,
-  "pad_token_id": 0,
-  "transformers_version": "4.39.0.dev0"
-}
checkpoint-1000/model-00001-of-00003.safetensors
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:0368dc4194ee2fdbe49909e4846ccc81d0b9fb3d1a1fd9c9d3b26a72c7895653
-size 4911635192
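The .safetensors entries above and below are Git LFS pointer files, not the weights themselves: three lines giving the spec version, the SHA-256 of the actual blob, and its byte size. A sketch of how such a pointer is derived from a local file:

```python
# Sketch: build a Git LFS pointer (version / oid / size) for a local file.
import hashlib
import os

def lfs_pointer(path: str) -> str:
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            digest.update(chunk)
    return (
        "version https://git-lfs.github.com/spec/v1\n"
        f"oid sha256:{digest.hexdigest()}\n"
        f"size {os.path.getsize(path)}\n"
    )

print(lfs_pointer("checkpoint-1000/model-00001-of-00003.safetensors"))
```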
checkpoint-1000/model-00002-of-00003.safetensors
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:f92fd384fe15130d9e13c2dc3394517b4fd642d7af96d5e734699919ff39a365
-size 4978830584
checkpoint-1000/model-00003-of-00003.safetensors
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:585b62412105f82990495f02d7c75c28b5e3c5777ce25a5021ffb17ee4c151e5
-size 134242760
checkpoint-1000/model.safetensors.index.json
DELETED
@@ -1,171 +0,0 @@
-{
-  "metadata": {
-    "total_size": 10024689664
-  },
-  "weight_map": {
-    "model.embed_tokens.weight": "model-00001-of-00003.safetensors",
-    "model.layers.0.input_layernorm.weight": "model-00001-of-00003.safetensors",
-    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
-    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.1.input_layernorm.weight": "model-00001-of-00003.safetensors",
-    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
-    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.10.input_layernorm.weight": "model-00002-of-00003.safetensors",
-    "model.layers.10.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.10.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
-    "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.11.input_layernorm.weight": "model-00002-of-00003.safetensors",
-    "model.layers.11.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.11.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
-    "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.12.input_layernorm.weight": "model-00002-of-00003.safetensors",
-    "model.layers.12.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.12.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
-    "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.13.input_layernorm.weight": "model-00002-of-00003.safetensors",
-    "model.layers.13.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.13.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
-    "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.14.input_layernorm.weight": "model-00002-of-00003.safetensors",
-    "model.layers.14.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.14.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
-    "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.15.input_layernorm.weight": "model-00002-of-00003.safetensors",
-    "model.layers.15.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.15.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
-    "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.16.input_layernorm.weight": "model-00002-of-00003.safetensors",
-    "model.layers.16.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.16.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
-    "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.17.input_layernorm.weight": "model-00003-of-00003.safetensors",
-    "model.layers.17.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
-    "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.17.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
-    "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.2.input_layernorm.weight": "model-00001-of-00003.safetensors",
-    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
-    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.3.input_layernorm.weight": "model-00001-of-00003.safetensors",
-    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
-    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.4.input_layernorm.weight": "model-00001-of-00003.safetensors",
-    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
-    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.5.input_layernorm.weight": "model-00001-of-00003.safetensors",
-    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
-    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.6.input_layernorm.weight": "model-00002-of-00003.safetensors",
-    "model.layers.6.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.6.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
-    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
-    "model.layers.7.input_layernorm.weight": "model-00002-of-00003.safetensors",
-    "model.layers.7.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.7.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
-    "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.8.input_layernorm.weight": "model-00002-of-00003.safetensors",
-    "model.layers.8.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.8.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
-    "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.9.input_layernorm.weight": "model-00002-of-00003.safetensors",
-    "model.layers.9.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.9.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
-    "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
-    "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
-    "model.norm.weight": "model-00003-of-00003.safetensors"
-  }
-}
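model.safetensors.index.json maps every parameter tensor to the shard that stores it; metadata.total_size (10,024,689,664 bytes) counts only the raw tensor payload, which is why it is slightly smaller here than the sum of the three shard files, each of which also carries a safetensors header. A sketch for listing the shards an index references:

```python
# Sketch: enumerate the shard files a safetensors index points at.
import json

with open("checkpoint-1000/model.safetensors.index.json") as f:
    index = json.load(f)

shards = sorted(set(index["weight_map"].values()))
print(shards)                           # the three model-0000x-of-00003 files
print(index["metadata"]["total_size"])  # 10024689664
```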
checkpoint-1000/optimizer.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:7015da0998d2376a75ff258403df7cc2e7ccd73d5263d78ab3e8265f255037cc
-size 20049522541
checkpoint-1000/rng_state_0.pth
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:1eafe3d5e0585dde8c5033613de99a5d4f23df4284a488f4007b3944580c0b97
-size 17655
checkpoint-1000/rng_state_1.pth
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:7e34eb456d2d003a2839f2daa9425e99bdd79ed7e24a1de9fc7d5738476bfb4b
-size 17655
checkpoint-1000/rng_state_2.pth
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:b374af4a2765d8771cee7a72921d3c2e438b9bee34f0b2d098ce6071afeb65e4
-size 17655
checkpoint-1000/rng_state_3.pth
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:5df75d8477fcc69c7abb03025313915ebfe3ac18c54a7c57aaa455c0099e13e5
-size 17655
checkpoint-1000/scheduler.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:20cd9ebcbc893aaeed7a3e32a2a0eeb344f999c91a3ede9cd26c28f6dd8957bb
-size 627
checkpoint-1000/special_tokens_map.json
DELETED
@@ -1,30 +0,0 @@
-{
-  "bos_token": {
-    "content": "<bos>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "<eos>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": {
-    "content": "<pad>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "unk_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
-}
checkpoint-1000/tokenizer.model
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:61a7b147390c64585d6c3543dd6fc636906c9af3865a5548f27f31aee1d4c8e2
-size 4241003
checkpoint-1000/tokenizer_config.json
DELETED
@@ -1,51 +0,0 @@
-{
-  "add_bos_token": true,
-  "add_eos_token": false,
-  "added_tokens_decoder": {
-    "0": {
-      "content": "<pad>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "1": {
-      "content": "<eos>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "2": {
-      "content": "<bos>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "3": {
-      "content": "<unk>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    }
-  },
-  "bos_token": "<bos>",
-  "clean_up_tokenization_spaces": false,
-  "eos_token": "<eos>",
-  "legacy": null,
-  "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "<pad>",
-  "padding_side": "right",
-  "sp_model_kwargs": {},
-  "spaces_between_special_tokens": false,
-  "split_special_tokens": false,
-  "tokenizer_class": "GemmaTokenizer",
-  "unk_token": "<unk>",
-  "use_default_system_prompt": false
-}
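The tokenizer files agree with config.json on the special-token ids (pad=0, eos=1, bos=2). A quick sketch, assuming the checkpoint folder is still available locally:

```python
# Sketch: confirm tokenizer special-token ids match the model config.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("checkpoint-1000")
print(tok.pad_token_id, tok.eos_token_id, tok.bos_token_id)  # 0 1 2
```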
checkpoint-1000/trainer_state.json
DELETED
@@ -1,721 +0,0 @@
-{
-  "best_metric": null,
-  "best_model_checkpoint": null,
-  "epoch": 0.6310143555765894,
-  "eval_steps": 500,
-  "global_step": 1000,
-  "is_hyper_param_search": false,
-  "is_local_process_zero": true,
-  "is_world_process_zero": true,
-  "log_history": [
-    {
-      "epoch": 0.01,
-      "grad_norm": 3.7190074920654297,
-      "learning_rate": 4.9995083170283816e-05,
-      "loss": 2.9245,
-      "step": 10
-    },
-    {
-      "epoch": 0.01,
-      "grad_norm": 3.431870222091675,
-      "learning_rate": 4.998033461515242e-05,
-      "loss": 2.0053,
-      "step": 20
-    },
-    {
-      "epoch": 0.02,
-      "grad_norm": 2.3315682411193848,
-      "learning_rate": 4.9955760135896534e-05,
-      "loss": 1.888,
-      "step": 30
-    },
-    {
-      "epoch": 0.03,
-      "grad_norm": 3.2937276363372803,
-      "learning_rate": 4.992136939879856e-05,
-      "loss": 1.8447,
-      "step": 40
-    },
-    {
-      "epoch": 0.03,
-      "grad_norm": 2.7375714778900146,
-      "learning_rate": 4.9877175931330346e-05,
-      "loss": 1.8212,
-      "step": 50
-    },
-    {
-      "epoch": 0.04,
-      "grad_norm": 2.15061092376709,
-      "learning_rate": 4.982319711683221e-05,
-      "loss": 1.793,
-      "step": 60
-    },
-    {
-      "epoch": 0.04,
-      "grad_norm": 2.0427424907684326,
-      "learning_rate": 4.975945418767529e-05,
-      "loss": 1.756,
-      "step": 70
-    },
-    {
-      "epoch": 0.05,
-      "grad_norm": 2.107785224914551,
-      "learning_rate": 4.968597221690986e-05,
-      "loss": 1.7285,
-      "step": 80
-    },
-    {
-      "epoch": 0.06,
-      "grad_norm": 2.100552558898926,
-      "learning_rate": 4.96027801084029e-05,
-      "loss": 1.7297,
-      "step": 90
-    },
-    {
-      "epoch": 0.06,
-      "grad_norm": 2.2227377891540527,
-      "learning_rate": 4.950991058546893e-05,
-      "loss": 1.7602,
-      "step": 100
-    },
-    {
-      "epoch": 0.07,
-      "grad_norm": 1.535144567489624,
-      "learning_rate": 4.940740017799833e-05,
-      "loss": 1.7433,
-      "step": 110
-    },
-    {
-      "epoch": 0.08,
-      "grad_norm": 1.6522979736328125,
-      "learning_rate": 4.929528920808854e-05,
-      "loss": 1.7363,
-      "step": 120
-    },
-    {
-      "epoch": 0.08,
-      "grad_norm": 2.8091869354248047,
-      "learning_rate": 4.917362177418342e-05,
-      "loss": 1.6872,
-      "step": 130
-    },
-    {
-      "epoch": 0.09,
-      "grad_norm": 2.1017510890960693,
-      "learning_rate": 4.904244573372733e-05,
-      "loss": 1.7084,
-      "step": 140
-    },
-    {
-      "epoch": 0.09,
-      "grad_norm": 1.6424258947372437,
-      "learning_rate": 4.8901812684340564e-05,
-      "loss": 1.6997,
-      "step": 150
-    },
-    {
-      "epoch": 0.1,
-      "grad_norm": 1.4547488689422607,
-      "learning_rate": 4.8751777943523634e-05,
-      "loss": 1.6747,
-      "step": 160
-    },
-    {
-      "epoch": 0.11,
-      "grad_norm": 1.6251146793365479,
-      "learning_rate": 4.8592400526898314e-05,
-      "loss": 1.6836,
-      "step": 170
-    },
-    {
-      "epoch": 0.11,
-      "grad_norm": 2.098386526107788,
-      "learning_rate": 4.842374312499405e-05,
-      "loss": 1.6552,
-      "step": 180
-    },
-    {
-      "epoch": 0.12,
-      "grad_norm": 2.2387640476226807,
-      "learning_rate": 4.824587207858888e-05,
-      "loss": 1.6489,
-      "step": 190
-    },
-    {
-      "epoch": 0.13,
-      "grad_norm": 1.7299611568450928,
-      "learning_rate": 4.805885735261454e-05,
-      "loss": 1.6576,
-      "step": 200
-    },
-    {
-      "epoch": 0.13,
-      "grad_norm": 1.5701665878295898,
-      "learning_rate": 4.786277250863599e-05,
-      "loss": 1.6533,
-      "step": 210
-    },
-    {
-      "epoch": 0.14,
-      "grad_norm": 2.417296886444092,
-      "learning_rate": 4.765769467591625e-05,
-      "loss": 1.6356,
-      "step": 220
-    },
-    {
-      "epoch": 0.15,
-      "grad_norm": 1.2636029720306396,
-      "learning_rate": 4.744370452107789e-05,
-      "loss": 1.6389,
-      "step": 230
-    },
-    {
-      "epoch": 0.15,
-      "grad_norm": 1.576324224472046,
-      "learning_rate": 4.722088621637309e-05,
-      "loss": 1.6546,
-      "step": 240
-    },
-    {
-      "epoch": 0.16,
-      "grad_norm": 1.9720542430877686,
-      "learning_rate": 4.698932740657479e-05,
-      "loss": 1.6354,
-      "step": 250
-    },
-    {
-      "epoch": 0.16,
-      "grad_norm": 1.5250279903411865,
-      "learning_rate": 4.6749119174501975e-05,
-      "loss": 1.6342,
-      "step": 260
-    },
-    {
-      "epoch": 0.17,
-      "grad_norm": 2.4737966060638428,
-      "learning_rate": 4.6500356005192514e-05,
-      "loss": 1.6407,
-      "step": 270
-    },
-    {
-      "epoch": 0.18,
-      "grad_norm": 1.2792372703552246,
-      "learning_rate": 4.6243135748737864e-05,
-      "loss": 1.6339,
-      "step": 280
-    },
-    {
-      "epoch": 0.18,
-      "grad_norm": 1.5593037605285645,
-      "learning_rate": 4.597755958179406e-05,
-      "loss": 1.6095,
-      "step": 290
-    },
-    {
-      "epoch": 0.19,
-      "grad_norm": 1.3141404390335083,
-      "learning_rate": 4.570373196778427e-05,
-      "loss": 1.6036,
-      "step": 300
-    },
-    {
-      "epoch": 0.2,
-      "grad_norm": 1.2617065906524658,
-      "learning_rate": 4.5421760615808474e-05,
-      "loss": 1.6244,
-      "step": 310
-    },
-    {
-      "epoch": 0.2,
-      "grad_norm": 1.64117431640625,
-      "learning_rate": 4.513175643827647e-05,
-      "loss": 1.6449,
-      "step": 320
-    },
-    {
-      "epoch": 0.21,
-      "grad_norm": 1.7132749557495117,
-      "learning_rate": 4.4833833507280884e-05,
-      "loss": 1.5948,
-      "step": 330
-    },
-    {
-      "epoch": 0.21,
-      "grad_norm": 2.1323654651641846,
-      "learning_rate": 4.4528109009727336e-05,
-      "loss": 1.627,
-      "step": 340
-    },
-    {
-      "epoch": 0.22,
-      "grad_norm": 2.253115653991699,
-      "learning_rate": 4.42147032012394e-05,
-      "loss": 1.6151,
-      "step": 350
-    },
-    {
-      "epoch": 0.23,
-      "grad_norm": 1.6143097877502441,
-      "learning_rate": 4.389373935885646e-05,
-      "loss": 1.5838,
-      "step": 360
-    },
-    {
-      "epoch": 0.23,
-      "grad_norm": 1.3353707790374756,
-      "learning_rate": 4.356534373254316e-05,
-      "loss": 1.5935,
-      "step": 370
-    },
-    {
-      "epoch": 0.24,
-      "grad_norm": 1.283742904663086,
-      "learning_rate": 4.322964549552943e-05,
-      "loss": 1.6015,
-      "step": 380
-    },
-    {
-      "epoch": 0.25,
-      "grad_norm": 1.437249779701233,
-      "learning_rate": 4.288677669350066e-05,
-      "loss": 1.577,
-      "step": 390
-    },
-    {
-      "epoch": 0.25,
-      "grad_norm": 1.5190638303756714,
-      "learning_rate": 4.2536872192658036e-05,
-      "loss": 1.5843,
-      "step": 400
-    },
-    {
-      "epoch": 0.26,
-      "grad_norm": 2.1320886611938477,
-      "learning_rate": 4.218006962666934e-05,
-      "loss": 1.6145,
-      "step": 410
-    },
-    {
-      "epoch": 0.27,
-      "grad_norm": 1.0696591138839722,
-      "learning_rate": 4.181650934253132e-05,
-      "loss": 1.5601,
-      "step": 420
-    },
-    {
-      "epoch": 0.27,
-      "grad_norm": 1.3149545192718506,
-      "learning_rate": 4.144633434536467e-05,
-      "loss": 1.5664,
-      "step": 430
-    },
-    {
-      "epoch": 0.28,
-      "grad_norm": 1.3661577701568604,
-      "learning_rate": 4.1069690242163484e-05,
-      "loss": 1.6002,
-      "step": 440
-    },
-    {
-      "epoch": 0.28,
-      "grad_norm": 1.6984481811523438,
-      "learning_rate": 4.06867251845213e-05,
-      "loss": 1.576,
-      "step": 450
-    },
-    {
-      "epoch": 0.29,
-      "grad_norm": 1.2728784084320068,
-      "learning_rate": 4.0297589810356165e-05,
-      "loss": 1.5448,
-      "step": 460
-    },
-    {
-      "epoch": 0.3,
-      "grad_norm": 1.4147616624832153,
-      "learning_rate": 3.9902437184657784e-05,
-      "loss": 1.5595,
-      "step": 470
-    },
-    {
-      "epoch": 0.3,
-      "grad_norm": 1.2289011478424072,
-      "learning_rate": 3.9501422739279956e-05,
-      "loss": 1.5628,
-      "step": 480
-    },
-    {
-      "epoch": 0.31,
-      "grad_norm": 1.5690233707427979,
-      "learning_rate": 3.909470421180201e-05,
-      "loss": 1.5731,
-      "step": 490
-    },
-    {
-      "epoch": 0.32,
-      "grad_norm": 1.4935098886489868,
-      "learning_rate": 3.8682441583483314e-05,
-      "loss": 1.545,
-      "step": 500
-    },
-    {
-      "epoch": 0.32,
-      "grad_norm": 1.2939772605895996,
-      "learning_rate": 3.8264797016335205e-05,
-      "loss": 1.5793,
-      "step": 510
-    },
-    {
-      "epoch": 0.33,
-      "grad_norm": 1.2150651216506958,
-      "learning_rate": 3.7841934789335164e-05,
-      "loss": 1.5378,
-      "step": 520
-    },
-    {
-      "epoch": 0.33,
-      "grad_norm": 1.2153139114379883,
-      "learning_rate": 3.741402123380828e-05,
-      "loss": 1.5345,
-      "step": 530
-    },
-    {
-      "epoch": 0.34,
-      "grad_norm": 1.290591835975647,
-      "learning_rate": 3.6981224668001424e-05,
-      "loss": 1.5517,
-      "step": 540
-    },
-    {
-      "epoch": 0.35,
-      "grad_norm": 1.1924967765808105,
-      "learning_rate": 3.654371533087586e-05,
-      "loss": 1.5472,
-      "step": 550
-    },
-    {
-      "epoch": 0.35,
-      "grad_norm": 1.6345056295394897,
-      "learning_rate": 3.610166531514436e-05,
-      "loss": 1.5564,
-      "step": 560
-    },
-    {
-      "epoch": 0.36,
-      "grad_norm": 2.185119867324829,
-      "learning_rate": 3.565524849957921e-05,
-      "loss": 1.5574,
-      "step": 570
-    },
-    {
-      "epoch": 0.37,
-      "grad_norm": 1.3646321296691895,
-      "learning_rate": 3.520464048061758e-05,
-      "loss": 1.5584,
-      "step": 580
-    },
-    {
-      "epoch": 0.37,
-      "grad_norm": 1.2333228588104248,
-      "learning_rate": 3.47500185032913e-05,
-      "loss": 1.518,
-      "step": 590
-    },
-    {
-      "epoch": 0.38,
-      "grad_norm": 1.3945318460464478,
-      "learning_rate": 3.4291561391508185e-05,
-      "loss": 1.5339,
-      "step": 600
-    },
-    {
-      "epoch": 0.38,
-      "grad_norm": 1.304306149482727,
-      "learning_rate": 3.3829449477712324e-05,
-      "loss": 1.5339,
-      "step": 610
-    },
-    {
-      "epoch": 0.39,
-      "grad_norm": 1.6393932104110718,
-      "learning_rate": 3.336386453195088e-05,
-      "loss": 1.5399,
-      "step": 620
-    },
-    {
-      "epoch": 0.4,
-      "grad_norm": 1.2000635862350464,
-      "learning_rate": 3.2894989690375626e-05,
-      "loss": 1.5233,
-      "step": 630
-    },
-    {
-      "epoch": 0.4,
-      "grad_norm": 1.1479601860046387,
-      "learning_rate": 3.2423009383206876e-05,
-      "loss": 1.538,
-      "step": 640
-    },
-    {
-      "epoch": 0.41,
-      "grad_norm": 1.1483389139175415,
-      "learning_rate": 3.194810926218861e-05,
-      "loss": 1.528,
-      "step": 650
-    },
-    {
-      "epoch": 0.42,
-      "grad_norm": 1.2403253316879272,
-      "learning_rate": 3.147047612756302e-05,
-      "loss": 1.5307,
-      "step": 660
-    },
-    {
-      "epoch": 0.42,
-      "grad_norm": 1.3997712135314941,
-      "learning_rate": 3.099029785459328e-05,
-      "loss": 1.4915,
-      "step": 670
-    },
-    {
-      "epoch": 0.43,
-      "grad_norm": 1.2010352611541748,
-      "learning_rate": 3.0507763319663517e-05,
-      "loss": 1.5268,
-      "step": 680
-    },
-    {
-      "epoch": 0.44,
-      "grad_norm": 1.0670932531356812,
-      "learning_rate": 3.002306232598497e-05,
-      "loss": 1.5273,
-      "step": 690
-    },
-    {
-      "epoch": 0.44,
-      "grad_norm": 1.2283655405044556,
-      "learning_rate": 2.9536385528937567e-05,
-      "loss": 1.5273,
-      "step": 700
-    },
-    {
-      "epoch": 0.45,
-      "grad_norm": 1.1306476593017578,
-      "learning_rate": 2.9047924361076345e-05,
-      "loss": 1.5072,
-      "step": 710
-    },
-    {
-      "epoch": 0.45,
-      "grad_norm": 1.1699943542480469,
-      "learning_rate": 2.8557870956832132e-05,
-      "loss": 1.4856,
-      "step": 720
-    },
-    {
-      "epoch": 0.46,
-      "grad_norm": 1.2550854682922363,
-      "learning_rate": 2.8066418076936167e-05,
-      "loss": 1.4983,
-      "step": 730
-    },
-    {
-      "epoch": 0.47,
-      "grad_norm": 1.0610970258712769,
-      "learning_rate": 2.7573759032598366e-05,
-      "loss": 1.5518,
-      "step": 740
-    },
-    {
-      "epoch": 0.47,
-      "grad_norm": 1.1754754781723022,
-      "learning_rate": 2.7080087609469062e-05,
-      "loss": 1.4998,
-      "step": 750
-    },
-    {
-      "epoch": 0.48,
-      "grad_norm": 1.1955766677856445,
-      "learning_rate": 2.6585597991414114e-05,
-      "loss": 1.5109,
-      "step": 760
-    },
-    {
-      "epoch": 0.49,
-      "grad_norm": 1.0891656875610352,
-      "learning_rate": 2.6090484684133404e-05,
-      "loss": 1.5007,
-      "step": 770
-    },
-    {
-      "epoch": 0.49,
-      "grad_norm": 1.0880335569381714,
-      "learning_rate": 2.5594942438652688e-05,
-      "loss": 1.5049,
-      "step": 780
-    },
-    {
-      "epoch": 0.5,
-      "grad_norm": 1.345954418182373,
-      "learning_rate": 2.509916617471903e-05,
-      "loss": 1.5154,
-      "step": 790
-    },
-    {
-      "epoch": 0.5,
-      "grad_norm": 1.1668224334716797,
-      "learning_rate": 2.46033509041298e-05,
-      "loss": 1.4883,
-      "step": 800
-    },
-    {
-      "epoch": 0.51,
-      "grad_norm": 1.055127501487732,
-      "learning_rate": 2.410769165402549e-05,
-      "loss": 1.5053,
-      "step": 810
-    },
-    {
-      "epoch": 0.52,
-      "grad_norm": 1.0528500080108643,
-      "learning_rate": 2.3612383390176503e-05,
-      "loss": 1.4871,
-      "step": 820
-    },
-    {
-      "epoch": 0.52,
-      "grad_norm": 1.328258991241455,
-      "learning_rate": 2.3117620940294048e-05,
-      "loss": 1.5037,
-      "step": 830
-    },
-    {
-      "epoch": 0.53,
-      "grad_norm": 1.0326772928237915,
-      "learning_rate": 2.2623598917395438e-05,
-      "loss": 1.4525,
-      "step": 840
-    },
-    {
-      "epoch": 0.54,
-      "grad_norm": 3.057058811187744,
-      "learning_rate": 2.213051164325366e-05,
-      "loss": 1.4898,
-      "step": 850
-    },
-    {
-      "epoch": 0.54,
-      "grad_norm": 1.1190940141677856,
-      "learning_rate": 2.1638553071961708e-05,
-      "loss": 1.488,
-      "step": 860
-    },
-    {
-      "epoch": 0.55,
-      "grad_norm": 1.1501041650772095,
-      "learning_rate": 2.1147916713641367e-05,
-      "loss": 1.4711,
-      "step": 870
-    },
-    {
-      "epoch": 0.56,
-      "grad_norm": 1.090022325515747,
-      "learning_rate": 2.0658795558326743e-05,
-      "loss": 1.488,
-      "step": 880
-    },
-    {
-      "epoch": 0.56,
-      "grad_norm": 1.0642565488815308,
-      "learning_rate": 2.017138200005236e-05,
-      "loss": 1.4791,
-      "step": 890
-    },
-    {
-      "epoch": 0.57,
-      "grad_norm": 1.3562296628952026,
-      "learning_rate": 1.9685867761175584e-05,
-      "loss": 1.4956,
-      "step": 900
-    },
-    {
-      "epoch": 0.57,
-      "grad_norm": 1.2069261074066162,
-      "learning_rate": 1.9202443816963425e-05,
-      "loss": 1.4918,
-      "step": 910
-    },
-    {
-      "epoch": 0.58,
-      "grad_norm": 1.3227437734603882,
-      "learning_rate": 1.872130032047302e-05,
-      "loss": 1.4577,
-      "step": 920
-    },
-    {
-      "epoch": 0.59,
-      "grad_norm": 1.0784181356430054,
-      "learning_rate": 1.824262652775568e-05,
-      "loss": 1.4888,
-      "step": 930
-    },
-    {
-      "epoch": 0.59,
-      "grad_norm": 1.000135898590088,
-      "learning_rate": 1.7766610723413684e-05,
-      "loss": 1.4673,
-      "step": 940
-    },
-    {
-      "epoch": 0.6,
-      "grad_norm": 1.136026382446289,
-      "learning_rate": 1.7293440146539196e-05,
-      "loss": 1.4779,
-      "step": 950
-    },
-    {
-      "epoch": 0.61,
-      "grad_norm": 1.123252272605896,
-      "learning_rate": 1.682330091706446e-05,
-      "loss": 1.4583,
-      "step": 960
-    },
-    {
-      "epoch": 0.61,
-      "grad_norm": 1.0559343099594116,
-      "learning_rate": 1.6356377962552238e-05,
-      "loss": 1.4471,
-      "step": 970
-    },
-    {
-      "epoch": 0.62,
-      "grad_norm": 1.0266658067703247,
-      "learning_rate": 1.589285494545514e-05,
-      "loss": 1.4632,
-      "step": 980
-    },
-    {
-      "epoch": 0.62,
-      "grad_norm": 1.1371444463729858,
-      "learning_rate": 1.5432914190872757e-05,
-      "loss": 1.4732,
-      "step": 990
-    },
-    {
-      "epoch": 0.63,
-      "grad_norm": 1.1203784942626953,
-      "learning_rate": 1.4976736614834664e-05,
-      "loss": 1.452,
-      "step": 1000
-    }
-  ],
-  "logging_steps": 10,
-  "max_steps": 1584,
-  "num_input_tokens_seen": 0,
-  "num_train_epochs": 1,
-  "save_steps": 1000,
-  "total_flos": 7.003073868034212e+17,
-  "train_batch_size": 4,
-  "trial_name": null,
-  "trial_params": null
-}
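trainer_state.json records the run at step 1000 of 1584 (epoch 0.63), with training loss falling from 2.9245 at step 10 to 1.452 at step 1000 under a decaying learning rate. A sketch for reading that loss curve back out of the file:

```python
# Sketch: extract the logged loss curve from trainer_state.json.
import json

with open("checkpoint-1000/trainer_state.json") as f:
    state = json.load(f)

losses = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
print(losses[0], losses[-1])  # (10, 2.9245) (1000, 1.452)
```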
checkpoint-1000/training_args.bin
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:f5f270ab83bb6b8b6198abab8938484e33528253ff0e05545853f6357e16e105
-size 4603
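training_args.bin is the serialized TrainingArguments for the run (its save_steps=1000, echoed in trainer_state.json, is why this checkpoint exists at step 1000). Manual cleanup commits like this one can also be avoided at training time; a sketch, with a placeholder output_dir, of capping on-disk checkpoints:

```python
# Sketch: let the Trainer prune old checkpoints instead of deleting them by hand.
from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="out",     # placeholder output directory
    save_steps=1000,      # matches this run's checkpoint cadence
    save_total_limit=1,   # keep only the newest checkpoint on disk
)
```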