Upload folder using huggingface_hub
This view is limited to 50 files because it contains too many changes.
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/0_client_model_round10.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/0_client_model_round12.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/0_client_model_round15.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/0_client_model_round17.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/0_client_model_round2.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/0_client_model_round20.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/0_client_model_round5.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/0_client_model_round7.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/0_trainer_state.json +392 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/1_client_model_round10.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/1_client_model_round12.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/1_client_model_round15.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/1_client_model_round17.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/1_client_model_round2.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/1_client_model_round20.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/1_client_model_round5.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/1_client_model_round7.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/1_trainer_state.json +392 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/2_client_model_round10.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/2_client_model_round12.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/2_client_model_round15.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/2_client_model_round17.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/2_client_model_round2.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/2_client_model_round20.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/2_client_model_round5.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/2_client_model_round7.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/2_trainer_state.json +392 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/3_client_model_round10.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/3_client_model_round12.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/3_client_model_round15.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/3_client_model_round17.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/3_client_model_round2.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/3_client_model_round20.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/3_client_model_round5.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/3_client_model_round7.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/3_trainer_state.json +392 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/4_client_model_round10.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/4_client_model_round12.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/4_client_model_round15.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/4_client_model_round17.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/4_client_model_round2.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/4_client_model_round20.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/4_client_model_round5.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/4_client_model_round7.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/4_trainer_state.json +392 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/5_client_model_round10.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/5_client_model_round12.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/5_client_model_round15.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/5_client_model_round17.pth +3 -0
- client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/5_client_model_round2.pth +3 -0
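The commit title above indicates the folder was pushed with the `huggingface_hub` client. Below is a minimal sketch of how such an upload is typically done; the `repo_id`, `repo_type`, and token handling are assumptions for illustration and are not taken from this diff. The large `.pth` files are stored through Git LFS, which is why the per-file diffs that follow show only LFS pointer lines.

```python
# Minimal sketch (assumed repo_id/repo_type): push the local run folder to the Hub.
# Requires a write token, e.g. via `huggingface-cli login` or the HF_TOKEN env var.
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2",
    repo_id="your-username/your-repo",  # hypothetical target repo
    repo_type="model",                  # assumption; could also be a dataset repo
    commit_message="Upload folder using huggingface_hub",
)
```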
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/0_client_model_round10.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7cd9b72c102467a54798d17af40216bfda340a47e9c928110d991498f37352fe
+size 389170582
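Each `.pth` entry in this diff is a Git LFS pointer, not the checkpoint itself: the three added lines record the pointer spec version, the SHA-256 of the actual file, and its size in bytes (roughly 389 MB per checkpoint). A small sketch of reading those fields back out of a pointer file; the path is illustrative:

```python
# Read the version / oid / size fields of a Git LFS pointer file (path is illustrative).
def read_lfs_pointer(path: str) -> dict:
    fields = {}
    with open(path, "r", encoding="utf-8") as fh:
        for line in fh:
            key, _, value = line.strip().partition(" ")
            if key:
                fields[key] = value
    return fields

ptr = read_lfs_pointer("0_client_model_round10.pth")
print(ptr["oid"])        # "sha256:7cd9b72c..." as in the pointer above
print(int(ptr["size"]))  # 389170582 bytes
```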
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/0_client_model_round12.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e25c6ccda7c6e331ba742be5fd51d6f69b5f4b378d4f279cbf21631e693c713
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/0_client_model_round15.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:579af869f114095f217068552a0c40d05ebcb9e89cda67b5455ec2d2694a2c20
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/0_client_model_round17.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:55f10e15c3e0c6e60298a05d6907c98495e7ada8a1f57764544fd3fe18156250
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/0_client_model_round2.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e7902b8470612bde23149de6ce83d22e0a10cda6fe6527ca93a47cd0c8067afe
+size 389170122
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/0_client_model_round20.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dc6bf0700f807db2c7cbfd1d47510b21c70c0365cb1a5c3a099a7e21ad210582
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/0_client_model_round5.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0a299ef241e8f742ea1657eafa2f0aa5ebbe006a0197111f56758dd2dc6b3499
+size 389170122
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/0_client_model_round7.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5138f690d9121a2e0460ccffc2733e479cc693db04a3525cce8af64bb024be7a
+size 389170122
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/0_trainer_state.json
ADDED
@@ -0,0 +1,392 @@
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.02, "grad_norm": 5.894064426422119, "learning_rate": 2e-05, "loss": 0.9085, "step": 2 },
    { "epoch": 0.04, "grad_norm": 1.3201831579208374, "learning_rate": 2e-05, "loss": 0.3126, "step": 4 },
    { "epoch": 0.06, "grad_norm": 2.121770143508911, "learning_rate": 2e-05, "loss": 0.7891, "step": 6 },
    { "epoch": 0.08, "grad_norm": 4.0768280029296875, "learning_rate": 2e-05, "loss": 1.0664, "step": 8 },
    { "epoch": 0.1, "grad_norm": 1.1001958847045898, "learning_rate": 2e-05, "loss": 1.078, "step": 10 },
    { "epoch": 0.12, "grad_norm": 2.823949098587036, "learning_rate": 2e-05, "loss": 0.3738, "step": 12 },
    { "epoch": 0.14, "grad_norm": 1.2138632535934448, "learning_rate": 2e-05, "loss": 0.1228, "step": 14 },
    { "epoch": 0.16, "grad_norm": 2.490877628326416, "learning_rate": 2e-05, "loss": 0.6624, "step": 16 },
    { "epoch": 0.18, "grad_norm": 9.688061714172363, "learning_rate": 2e-05, "loss": 1.0531, "step": 18 },
    { "epoch": 0.2, "grad_norm": 3.0712640285491943, "learning_rate": 2e-05, "loss": 2.184, "step": 20 },
    { "epoch": 0.22, "grad_norm": 0.8509835004806519, "learning_rate": 2e-05, "loss": 0.1043, "step": 22 },
    { "epoch": 0.24, "grad_norm": 1.7280430793762207, "learning_rate": 2e-05, "loss": 0.1391, "step": 24 },
    { "epoch": 0.26, "grad_norm": 0.14078085124492645, "learning_rate": 2e-05, "loss": 0.0165, "step": 26 },
    { "epoch": 0.28, "grad_norm": 9.411897659301758, "learning_rate": 2e-05, "loss": 1.9539, "step": 28 },
    { "epoch": 0.3, "grad_norm": 0.2108028680086136, "learning_rate": 2e-05, "loss": 0.0312, "step": 30 },
    { "epoch": 0.32, "grad_norm": 1.6999951601028442, "learning_rate": 2e-05, "loss": 0.5234, "step": 32 },
    { "epoch": 0.34, "grad_norm": 3.3077850341796875, "learning_rate": 2e-05, "loss": 0.8673, "step": 34 },
    { "epoch": 0.36, "grad_norm": 0.1280887871980667, "learning_rate": 2e-05, "loss": 0.2362, "step": 36 },
    { "epoch": 0.38, "grad_norm": 1.76698637008667, "learning_rate": 2e-05, "loss": 0.1529, "step": 38 },
    { "epoch": 0.4, "grad_norm": 3.7104475498199463, "learning_rate": 2e-05, "loss": 0.6142, "step": 40 },
    { "epoch": 0.42, "grad_norm": 0.521893322467804, "learning_rate": 2e-05, "loss": 0.1496, "step": 42 },
    { "epoch": 0.44, "grad_norm": 0.5190557837486267, "learning_rate": 2e-05, "loss": 0.5708, "step": 44 },
    { "epoch": 0.46, "grad_norm": 3.1117076873779297, "learning_rate": 2e-05, "loss": 0.5076, "step": 46 },
    { "epoch": 0.48, "grad_norm": 5.133110523223877, "learning_rate": 2e-05, "loss": 0.7732, "step": 48 },
    { "epoch": 0.5, "grad_norm": 0.7691269516944885, "learning_rate": 2e-05, "loss": 0.2829, "step": 50 },
    { "epoch": 0.52, "grad_norm": 3.188059091567993, "learning_rate": 2e-05, "loss": 0.7398, "step": 52 },
    { "epoch": 0.54, "grad_norm": 3.092348575592041, "learning_rate": 2e-05, "loss": 0.299, "step": 54 },
    { "epoch": 0.56, "grad_norm": 2.5498709678649902, "learning_rate": 2e-05, "loss": 1.4419, "step": 56 },
    { "epoch": 0.58, "grad_norm": 3.921797275543213, "learning_rate": 2e-05, "loss": 0.285, "step": 58 },
    { "epoch": 0.6, "grad_norm": 3.740967273712158, "learning_rate": 2e-05, "loss": 0.6471, "step": 60 },
    { "epoch": 0.62, "grad_norm": 3.9472544193267822, "learning_rate": 2e-05, "loss": 1.4534, "step": 62 },
    { "epoch": 0.64, "grad_norm": 0.546498715877533, "learning_rate": 2e-05, "loss": 0.0518, "step": 64 },
    { "epoch": 0.66, "grad_norm": 6.151440143585205, "learning_rate": 2e-05, "loss": 1.2357, "step": 66 },
    { "epoch": 0.68, "grad_norm": 0.18737871944904327, "learning_rate": 2e-05, "loss": 0.1359, "step": 68 },
    { "epoch": 0.7, "grad_norm": 4.142384052276611, "learning_rate": 2e-05, "loss": 0.6293, "step": 70 },
    { "epoch": 0.72, "grad_norm": 1.109649419784546, "learning_rate": 2e-05, "loss": 0.0879, "step": 72 },
    { "epoch": 0.74, "grad_norm": 3.949535608291626, "learning_rate": 2e-05, "loss": 0.4062, "step": 74 },
    { "epoch": 0.76, "grad_norm": 1.329496145248413, "learning_rate": 2e-05, "loss": 1.1151, "step": 76 },
    { "epoch": 0.78, "grad_norm": 2.8130714893341064, "learning_rate": 2e-05, "loss": 1.1395, "step": 78 },
    { "epoch": 0.8, "grad_norm": 1.4879205226898193, "learning_rate": 2e-05, "loss": 0.167, "step": 80 },
    { "epoch": 0.82, "grad_norm": 1.950775384902954, "learning_rate": 2e-05, "loss": 1.5502, "step": 82 },
    { "epoch": 0.84, "grad_norm": 2.520659923553467, "learning_rate": 2e-05, "loss": 0.4802, "step": 84 },
    { "epoch": 0.86, "grad_norm": 0.47688207030296326, "learning_rate": 2e-05, "loss": 0.2397, "step": 86 },
    { "epoch": 0.88, "grad_norm": 1.8515924215316772, "learning_rate": 2e-05, "loss": 0.1466, "step": 88 },
    { "epoch": 0.9, "grad_norm": 2.2912800312042236, "learning_rate": 2e-05, "loss": 0.1982, "step": 90 },
    { "epoch": 0.92, "grad_norm": 0.22282549738883972, "learning_rate": 2e-05, "loss": 0.0881, "step": 92 },
    { "epoch": 0.94, "grad_norm": 2.608417272567749, "learning_rate": 2e-05, "loss": 0.3884, "step": 94 },
    { "epoch": 0.96, "grad_norm": 0.5378755331039429, "learning_rate": 2e-05, "loss": 0.1059, "step": 96 },
    { "epoch": 0.98, "grad_norm": 1.9015717506408691, "learning_rate": 2e-05, "loss": 0.417, "step": 98 },
    { "epoch": 1.0, "grad_norm": 0.07535640150308609, "learning_rate": 2e-05, "loss": 0.2791, "step": 100 },
    { "epoch": 1.0, "step": 100, "total_flos": 4974012128034816.0, "train_loss": 0.5840333950519562, "train_runtime": 160.7903, "train_samples_per_second": 2.488, "train_steps_per_second": 0.622 }
  ],
  "logging_steps": 2,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": false, "should_training_stop": false },
      "attributes": {}
    }
  },
  "total_flos": 4974012128034816.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}
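Each `*_trainer_state.json` above is a standard Hugging Face `Trainer` state dump: `log_history` holds one record every `logging_steps` (here 2) optimizer steps, plus a closing summary with `train_loss`, runtime, and throughput. Below is a small sketch of loading one of these files and recomputing the average logged loss; the path is illustrative, and the result should sit close to the reported `train_loss`.

```python
# Inspect a trainer_state.json from this commit: average the logged training losses.
import json

with open("0_trainer_state.json", "r", encoding="utf-8") as fh:
    state = json.load(fh)

# Per-step records carry "grad_norm"; the final summary record does not.
step_logs = [rec for rec in state["log_history"] if "grad_norm" in rec]
mean_loss = sum(rec["loss"] for rec in step_logs) / len(step_logs)

print(state["global_step"], state["max_steps"])  # 100 100
print(round(mean_loss, 4))                       # close to the summary's "train_loss"
```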
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/1_client_model_round10.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:040fc1ba02166da25b53124eafd7ab770676f8ae9902cfdaa1fd9419fb975fb8
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/1_client_model_round12.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:66b27e044a5bef36de82e366639214d4b09b43709be6e33cf4e497de1d123f7b
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/1_client_model_round15.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0ae2913055c4818c7fd3525c69ed67558d07bfdf1a04928bf20b6e0ec84589bd
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/1_client_model_round17.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5a750a9eaf2737a6051a7e2f44d5637e00df4c42ad6bf3f2cb5efe4548e30071
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/1_client_model_round2.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d8b6c4b1a4df16e1fd1d9aaaaa959fb6ccc43c475a8a7642a0a98dcce7ecdf5e
+size 389170122
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/1_client_model_round20.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:eaa4f8cff476a8ac9e4d7d695aa2237b7ff0a1a1c4f5e21d166ecf80155bcc78
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/1_client_model_round5.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4db54eaad21943935cf7a0ac6fb6e20d00916bf5ebd09fcb20ef845668c17e76
+size 389170122
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/1_client_model_round7.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9d949af7f4e051c441c783d35219de277685e59671951e697ba610a0b6817109
+size 389170122
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/1_trainer_state.json
ADDED
@@ -0,0 +1,392 @@
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.02, "grad_norm": 2.7926690578460693, "learning_rate": 2e-05, "loss": 0.3634, "step": 2 },
    { "epoch": 0.04, "grad_norm": 0.6020875573158264, "learning_rate": 2e-05, "loss": 0.5082, "step": 4 },
    { "epoch": 0.06, "grad_norm": 0.04904913157224655, "learning_rate": 2e-05, "loss": 0.003, "step": 6 },
    { "epoch": 0.08, "grad_norm": 0.08567889034748077, "learning_rate": 2e-05, "loss": 0.0071, "step": 8 },
    { "epoch": 0.1, "grad_norm": 0.0006135515286587179, "learning_rate": 2e-05, "loss": 0.0015, "step": 10 },
    { "epoch": 0.12, "grad_norm": 3.915755033493042, "learning_rate": 2e-05, "loss": 0.1474, "step": 12 },
    { "epoch": 0.14, "grad_norm": 0.04489617794752121, "learning_rate": 2e-05, "loss": 0.0029, "step": 14 },
    { "epoch": 0.16, "grad_norm": 0.003059796988964081, "learning_rate": 2e-05, "loss": 0.0024, "step": 16 },
    { "epoch": 0.18, "grad_norm": 0.012128287926316261, "learning_rate": 2e-05, "loss": 0.0009, "step": 18 },
    { "epoch": 0.2, "grad_norm": 0.003509308211505413, "learning_rate": 2e-05, "loss": 0.0098, "step": 20 },
    { "epoch": 0.22, "grad_norm": 0.6594346165657043, "learning_rate": 2e-05, "loss": 0.1193, "step": 22 },
    { "epoch": 0.24, "grad_norm": 3.0088326930999756, "learning_rate": 2e-05, "loss": 0.1751, "step": 24 },
    { "epoch": 0.26, "grad_norm": 0.003368888283148408, "learning_rate": 2e-05, "loss": 0.0002, "step": 26 },
    { "epoch": 0.28, "grad_norm": 0.09144292771816254, "learning_rate": 2e-05, "loss": 0.2113, "step": 28 },
    { "epoch": 0.3, "grad_norm": 0.0011707392986863852, "learning_rate": 2e-05, "loss": 0.0006, "step": 30 },
    { "epoch": 0.32, "grad_norm": 0.00044834177242591977, "learning_rate": 2e-05, "loss": 0.0001, "step": 32 },
    { "epoch": 0.34, "grad_norm": 0.0155986612662673, "learning_rate": 2e-05, "loss": 0.0009, "step": 34 },
    { "epoch": 0.36, "grad_norm": 0.008986121974885464, "learning_rate": 2e-05, "loss": 0.0024, "step": 36 },
    { "epoch": 0.38, "grad_norm": 0.0013730537611991167, "learning_rate": 2e-05, "loss": 0.0001, "step": 38 },
    { "epoch": 0.4, "grad_norm": 0.0022560814395546913, "learning_rate": 2e-05, "loss": 0.1583, "step": 40 },
    { "epoch": 0.42, "grad_norm": 0.7707070112228394, "learning_rate": 2e-05, "loss": 0.0477, "step": 42 },
    { "epoch": 0.44, "grad_norm": 0.0005886413273401558, "learning_rate": 2e-05, "loss": 0.5489, "step": 44 },
    { "epoch": 0.46, "grad_norm": 0.003063227515667677, "learning_rate": 2e-05, "loss": 0.0086, "step": 46 },
    { "epoch": 0.48, "grad_norm": 0.0037043734919279814, "learning_rate": 2e-05, "loss": 0.0003, "step": 48 },
    { "epoch": 0.5, "grad_norm": 0.0012796800583600998, "learning_rate": 2e-05, "loss": 0.0012, "step": 50 },
    { "epoch": 0.52, "grad_norm": 0.010248442180454731, "learning_rate": 2e-05, "loss": 0.0005, "step": 52 },
    { "epoch": 0.54, "grad_norm": 0.0034741233102977276, "learning_rate": 2e-05, "loss": 0.0003, "step": 54 },
    { "epoch": 0.56, "grad_norm": 0.1743801087141037, "learning_rate": 2e-05, "loss": 0.0068, "step": 56 },
    { "epoch": 0.58, "grad_norm": 0.007030021399259567, "learning_rate": 2e-05, "loss": 0.2121, "step": 58 },
    { "epoch": 0.6, "grad_norm": 0.19218796491622925, "learning_rate": 2e-05, "loss": 0.0046, "step": 60 },
    { "epoch": 0.62, "grad_norm": 0.00157554994802922, "learning_rate": 2e-05, "loss": 0.0013, "step": 62 },
    { "epoch": 0.64, "grad_norm": 0.0012670699506998062, "learning_rate": 2e-05, "loss": 0.0001, "step": 64 },
    { "epoch": 0.66, "grad_norm": 0.20495271682739258, "learning_rate": 2e-05, "loss": 0.18, "step": 66 },
    { "epoch": 0.68, "grad_norm": 0.24123035371303558, "learning_rate": 2e-05, "loss": 0.0098, "step": 68 },
    { "epoch": 0.7, "grad_norm": 0.5640333890914917, "learning_rate": 2e-05, "loss": 0.0182, "step": 70 },
    { "epoch": 0.72, "grad_norm": 0.00047914290917105973, "learning_rate": 2e-05, "loss": 0.0003, "step": 72 },
    { "epoch": 0.74, "grad_norm": 0.05535870045423508, "learning_rate": 2e-05, "loss": 0.0027, "step": 74 },
    { "epoch": 0.76, "grad_norm": 0.9184343814849854, "learning_rate": 2e-05, "loss": 0.0988, "step": 76 },
    { "epoch": 0.78, "grad_norm": 0.017705975100398064, "learning_rate": 2e-05, "loss": 0.1589, "step": 78 },
    { "epoch": 0.8, "grad_norm": 1.9290534257888794, "learning_rate": 2e-05, "loss": 0.0509, "step": 80 },
    { "epoch": 0.82, "grad_norm": 0.0019139001378789544, "learning_rate": 2e-05, "loss": 0.0002, "step": 82 },
    { "epoch": 0.84, "grad_norm": 0.03513801470398903, "learning_rate": 2e-05, "loss": 0.0049, "step": 84 },
    { "epoch": 0.86, "grad_norm": 0.005002826452255249, "learning_rate": 2e-05, "loss": 0.023, "step": 86 },
    { "epoch": 0.88, "grad_norm": 0.006781861651688814, "learning_rate": 2e-05, "loss": 0.0004, "step": 88 },
    { "epoch": 0.9, "grad_norm": 0.44860827922821045, "learning_rate": 2e-05, "loss": 0.0252, "step": 90 },
    { "epoch": 0.92, "grad_norm": 0.0025509921833872795, "learning_rate": 2e-05, "loss": 0.2639, "step": 92 },
    { "epoch": 0.94, "grad_norm": 0.1632906198501587, "learning_rate": 2e-05, "loss": 0.0047, "step": 94 },
    { "epoch": 0.96, "grad_norm": 0.006769334431737661, "learning_rate": 2e-05, "loss": 0.0002, "step": 96 },
    { "epoch": 0.98, "grad_norm": 0.008943594060838223, "learning_rate": 2e-05, "loss": 0.0038, "step": 98 },
    { "epoch": 1.0, "grad_norm": 0.0011910570319741964, "learning_rate": 2e-05, "loss": 0.0005, "step": 100 },
    { "epoch": 1.0, "step": 100, "total_flos": 5013697617461248.0, "train_loss": 0.06788168847560883, "train_runtime": 159.9459, "train_samples_per_second": 2.501, "train_steps_per_second": 0.625 }
  ],
  "logging_steps": 2,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": false, "should_training_stop": false },
      "attributes": {}
    }
  },
  "total_flos": 5013697617461248.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/2_client_model_round10.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:97178d8b4ef002af17e1c1babc854b69d498b28af4f39cce2d0dd39fedbde7ce
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/2_client_model_round12.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d8066065f2acdac23b4511d758ce20c24502ec9ab5def97b2e7bc0b46c963210
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/2_client_model_round15.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4fc26b83020d42b0beeabe2c32501098b86237d6c8f2feed8bbe7ab778d51e88
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/2_client_model_round17.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b988bdabff0eebbb189e63cc27e00dcece2b2ac4230235431dcb0f98f3df503d
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/2_client_model_round2.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d4d2e4922163550bfb783bf8cdf258af5a948ad085565b2004a5054284dbaa21
+size 389170122
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/2_client_model_round20.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2f9418ec35221a73971914591676bda02aaa4cc416f5bf8770c359c850795549
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/2_client_model_round5.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9fbbcfcb71a48141155ec932e5d4f9d86d306cec4d8589381b8e7d98b2317a5a
+size 389170122
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/2_client_model_round7.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:de2ff3db9a0037bfab3244f817729ff083e6446bb75dd088cd91b89d1bccaf4e
+size 389170122
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/2_trainer_state.json
ADDED
@@ -0,0 +1,392 @@
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.02, "grad_norm": 1.9014546871185303, "learning_rate": 2e-05, "loss": 0.8479, "step": 2 },
    { "epoch": 0.04, "grad_norm": 2.2428691387176514, "learning_rate": 2e-05, "loss": 0.5352, "step": 4 },
    { "epoch": 0.06, "grad_norm": 0.7876530289649963, "learning_rate": 2e-05, "loss": 0.197, "step": 6 },
    { "epoch": 0.08, "grad_norm": 2.0728108882904053, "learning_rate": 2e-05, "loss": 0.3382, "step": 8 },
    { "epoch": 0.1, "grad_norm": 2.316784620285034, "learning_rate": 2e-05, "loss": 0.4009, "step": 10 },
    { "epoch": 0.12, "grad_norm": 1.4547951221466064, "learning_rate": 2e-05, "loss": 1.1106, "step": 12 },
    { "epoch": 0.14, "grad_norm": 5.191783428192139, "learning_rate": 2e-05, "loss": 1.1847, "step": 14 },
    { "epoch": 0.16, "grad_norm": 2.597446918487549, "learning_rate": 2e-05, "loss": 0.5215, "step": 16 },
    { "epoch": 0.18, "grad_norm": 1.2228509187698364, "learning_rate": 2e-05, "loss": 0.2476, "step": 18 },
    { "epoch": 0.2, "grad_norm": 4.161182880401611, "learning_rate": 2e-05, "loss": 1.322, "step": 20 },
    { "epoch": 0.22, "grad_norm": 1.1612755060195923, "learning_rate": 2e-05, "loss": 0.3423, "step": 22 },
    { "epoch": 0.24, "grad_norm": 3.0303940773010254, "learning_rate": 2e-05, "loss": 0.3911, "step": 24 },
    { "epoch": 0.26, "grad_norm": 0.7589966058731079, "learning_rate": 2e-05, "loss": 0.237, "step": 26 },
    { "epoch": 0.28, "grad_norm": 2.836357593536377, "learning_rate": 2e-05, "loss": 1.1011, "step": 28 },
    { "epoch": 0.3, "grad_norm": 3.2222888469696045, "learning_rate": 2e-05, "loss": 1.3152, "step": 30 },
    { "epoch": 0.32, "grad_norm": 2.079301118850708, "learning_rate": 2e-05, "loss": 0.6851, "step": 32 },
    { "epoch": 0.34, "grad_norm": 5.449805736541748, "learning_rate": 2e-05, "loss": 1.6285, "step": 34 },
    { "epoch": 0.36, "grad_norm": 0.7335667610168457, "learning_rate": 2e-05, "loss": 0.1028, "step": 36 },
    { "epoch": 0.38, "grad_norm": 5.621280670166016, "learning_rate": 2e-05, "loss": 0.8429, "step": 38 },
    { "epoch": 0.4, "grad_norm": 0.6560351848602295, "learning_rate": 2e-05, "loss": 0.3729, "step": 40 },
    { "epoch": 0.42, "grad_norm": 0.6034119129180908, "learning_rate": 2e-05, "loss": 0.0629, "step": 42 },
    { "epoch": 0.44, "grad_norm": 2.0162291526794434, "learning_rate": 2e-05, "loss": 0.2953, "step": 44 },
    { "epoch": 0.46, "grad_norm": 0.5169147849082947, "learning_rate": 2e-05, "loss": 0.3583, "step": 46 },
    { "epoch": 0.48, "grad_norm": 1.3467202186584473, "learning_rate": 2e-05, "loss": 0.2518, "step": 48 },
    { "epoch": 0.5, "grad_norm": 0.08579041063785553, "learning_rate": 2e-05, "loss": 0.1946, "step": 50 },
    { "epoch": 0.52, "grad_norm": 0.8094596266746521, "learning_rate": 2e-05, "loss": 0.4245, "step": 52 },
    { "epoch": 0.54, "grad_norm": 3.2027623653411865, "learning_rate": 2e-05, "loss": 0.9185, "step": 54 },
    { "epoch": 0.56, "grad_norm": 6.3284478187561035, "learning_rate": 2e-05, "loss": 1.153, "step": 56 },
    { "epoch": 0.58, "grad_norm": 5.553554058074951, "learning_rate": 2e-05, "loss": 1.1882, "step": 58 },
    { "epoch": 0.6, "grad_norm": 4.07298469543457, "learning_rate": 2e-05, "loss": 0.657, "step": 60 },
    { "epoch": 0.62, "grad_norm": 1.5594613552093506, "learning_rate": 2e-05, "loss": 0.1507, "step": 62 },
    { "epoch": 0.64, "grad_norm": 0.9592647552490234, "learning_rate": 2e-05, "loss": 0.5403, "step": 64 },
    { "epoch": 0.66, "grad_norm": 1.393865704536438, "learning_rate": 2e-05, "loss": 0.214, "step": 66 },
    { "epoch": 0.68, "grad_norm": 3.127103567123413, "learning_rate": 2e-05, "loss": 0.578, "step": 68 },
    { "epoch": 0.7, "grad_norm": 1.9923616647720337, "learning_rate": 2e-05, "loss": 0.475, "step": 70 },
    { "epoch": 0.72, "grad_norm": 1.7169052362442017, "learning_rate": 2e-05, "loss": 0.6912, "step": 72 },
    { "epoch": 0.74, "grad_norm": 1.8878061771392822, "learning_rate": 2e-05, "loss": 0.5777, "step": 74 },
    { "epoch": 0.76, "grad_norm": 0.5864697098731995, "learning_rate": 2e-05, "loss": 0.3095, "step": 76 },
    { "epoch": 0.78, "grad_norm": 1.0971084833145142, "learning_rate": 2e-05, "loss": 0.4023, "step": 78 },
    { "epoch": 0.8, "grad_norm": 3.0812549591064453, "learning_rate": 2e-05, "loss": 1.4656, "step": 80 },
    { "epoch": 0.82, "grad_norm": 1.087915301322937, "learning_rate": 2e-05, "loss": 0.6259, "step": 82 },
    { "epoch": 0.84, "grad_norm": 0.559573233127594, "learning_rate": 2e-05, "loss": 0.2886, "step": 84 },
    { "epoch": 0.86, "grad_norm": 5.675575256347656, "learning_rate": 2e-05, "loss": 0.8127, "step": 86 },
    { "epoch": 0.88, "grad_norm": 1.2897826433181763, "learning_rate": 2e-05, "loss": 0.2609, "step": 88 },
    { "epoch": 0.9, "grad_norm": 2.957866668701172, "learning_rate": 2e-05, "loss": 0.2235, "step": 90 },
    { "epoch": 0.92, "grad_norm": 2.3432726860046387, "learning_rate": 2e-05, "loss": 0.657, "step": 92 },
    { "epoch": 0.94, "grad_norm": 5.912756443023682, "learning_rate": 2e-05, "loss": 0.6071, "step": 94 },
    { "epoch": 0.96, "grad_norm": 1.4021425247192383, "learning_rate": 2e-05, "loss": 0.6625, "step": 96 },
    { "epoch": 0.98, "grad_norm": 0.7503753900527954, "learning_rate": 2e-05, "loss": 0.4068, "step": 98 },
    { "epoch": 1.0, "grad_norm": 7.381590366363525, "learning_rate": 2e-05, "loss": 0.9321, "step": 100 },
    { "epoch": 1.0, "step": 100, "total_flos": 4913814755606528.0, "train_loss": 0.6022030544281006, "train_runtime": 160.1433, "train_samples_per_second": 2.498, "train_steps_per_second": 0.624 }
  ],
  "logging_steps": 2,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": false, "should_training_stop": false },
      "attributes": {}
    }
  },
  "total_flos": 4913814755606528.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/3_client_model_round10.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:12662a115ad2c2c1782b5a4b2a2b9b626b7b21bfffefe471f1cf0884dbc07775
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/3_client_model_round12.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cf633a0b5fb896130a3119571a77f610cee784ce26f06017aa6146af6a1e7291
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/3_client_model_round15.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8b0d932a65e55df3cc8f20317dd81eece079e91c19ea65c942cbfd669a3969f3
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/3_client_model_round17.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a6852d881c1f756efb68d80a21cfc2dd501ff10349dfd6fac3517650134d2471
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/3_client_model_round2.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7e5a706e5f71613a148a641e094205e1d97e0b4f5776f1715a10269608796e6d
+size 389170122
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/3_client_model_round20.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:62ea36682ef1abda8799e6bada7ecdd090db0bcbaf6f371a0cede135e0e44b35
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/3_client_model_round5.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7b171e756d9c7f4c802172c232f513ca798305059851682ff0f5d1c797e99bd0
+size 389170122
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/3_client_model_round7.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0238c056c8e82b97d019db62946e1d34fc3a8c834ab8cb1bef720927562f6cdb
+size 389170122
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/3_trainer_state.json
ADDED
@@ -0,0 +1,392 @@
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.02, "grad_norm": 3.097942590713501, "learning_rate": 2e-05, "loss": 0.5807, "step": 2 },
    { "epoch": 0.04, "grad_norm": 2.9074313640594482, "learning_rate": 2e-05, "loss": 1.5071, "step": 4 },
    { "epoch": 0.06, "grad_norm": 2.6304781436920166, "learning_rate": 2e-05, "loss": 0.4908, "step": 6 },
    { "epoch": 0.08, "grad_norm": 4.46525764465332, "learning_rate": 2e-05, "loss": 1.0056, "step": 8 },
    { "epoch": 0.1, "grad_norm": 2.3878512382507324, "learning_rate": 2e-05, "loss": 0.3701, "step": 10 },
    { "epoch": 0.12, "grad_norm": 2.7448008060455322, "learning_rate": 2e-05, "loss": 1.0356, "step": 12 },
    { "epoch": 0.14, "grad_norm": 1.3460839986801147, "learning_rate": 2e-05, "loss": 0.2344, "step": 14 },
    { "epoch": 0.16, "grad_norm": 4.188470363616943, "learning_rate": 2e-05, "loss": 0.8728, "step": 16 },
    { "epoch": 0.18, "grad_norm": 5.190163612365723, "learning_rate": 2e-05, "loss": 1.6686, "step": 18 },
    { "epoch": 0.2, "grad_norm": 2.3317737579345703, "learning_rate": 2e-05, "loss": 1.6924, "step": 20 },
    { "epoch": 0.22, "grad_norm": 3.992833137512207, "learning_rate": 2e-05, "loss": 0.6843, "step": 22 },
    { "epoch": 0.24, "grad_norm": 4.71842098236084, "learning_rate": 2e-05, "loss": 0.8008, "step": 24 },
    { "epoch": 0.26, "grad_norm": 1.7787336111068726, "learning_rate": 2e-05, "loss": 0.4146, "step": 26 },
    { "epoch": 0.28, "grad_norm": 3.0199549198150635, "learning_rate": 2e-05, "loss": 0.7224, "step": 28 },
    { "epoch": 0.3, "grad_norm": 4.692366123199463, "learning_rate": 2e-05, "loss": 1.0116, "step": 30 },
    { "epoch": 0.32, "grad_norm": 6.386943340301514, "learning_rate": 2e-05, "loss": 1.5629, "step": 32 },
    { "epoch": 0.34, "grad_norm": 2.464090585708618, "learning_rate": 2e-05, "loss": 1.045, "step": 34 },
    { "epoch": 0.36, "grad_norm": 5.253996849060059, "learning_rate": 2e-05, "loss": 0.8652, "step": 36 },
    { "epoch": 0.38, "grad_norm": 2.0147178173065186, "learning_rate": 2e-05, "loss": 0.8926, "step": 38 },
    { "epoch": 0.4, "grad_norm": 6.174330234527588, "learning_rate": 2e-05, "loss": 0.5714, "step": 40 },
    { "epoch": 0.42, "grad_norm": 5.197195053100586, "learning_rate": 2e-05, "loss": 2.0066, "step": 42 },
    { "epoch": 0.44, "grad_norm": 5.705170631408691, "learning_rate": 2e-05, "loss": 0.8634, "step": 44 },
    { "epoch": 0.46, "grad_norm": 2.936582088470459, "learning_rate": 2e-05, "loss": 0.688, "step": 46 },
    { "epoch": 0.48, "grad_norm": 1.3450106382369995, "learning_rate": 2e-05, "loss": 0.8493, "step": 48 },
    { "epoch": 0.5, "grad_norm": 3.475832223892212, "learning_rate": 2e-05, "loss": 1.1124, "step": 50 },
    { "epoch": 0.52, "grad_norm": 3.734349012374878, "learning_rate": 2e-05, "loss": 0.4247, "step": 52 },
    { "epoch": 0.54, "grad_norm": 6.65780782699585, "learning_rate": 2e-05, "loss": 1.8004, "step": 54 },
    { "epoch": 0.56, "grad_norm": 4.2812113761901855, "learning_rate": 2e-05, "loss": 0.8878, "step": 56 },
    { "epoch": 0.58, "grad_norm": 6.450176239013672, "learning_rate": 2e-05, "loss": 1.1367, "step": 58 },
    { "epoch": 0.6, "grad_norm": 2.53096079826355, "learning_rate": 2e-05, "loss": 1.2302, "step": 60 },
    { "epoch": 0.62, "grad_norm": 1.3121376037597656, "learning_rate": 2e-05, "loss": 0.3248, "step": 62 },
    { "epoch": 0.64, "grad_norm": 2.2921791076660156, "learning_rate": 2e-05, "loss": 0.9923, "step": 64 },
    { "epoch": 0.66, "grad_norm": 1.9732469320297241, "learning_rate": 2e-05, "loss": 0.3496, "step": 66 },
    { "epoch": 0.68, "grad_norm": 19.43094253540039, "learning_rate": 2e-05, "loss": 1.545, "step": 68 },
    { "epoch": 0.7, "grad_norm": 2.824756383895874, "learning_rate": 2e-05, "loss": 1.0909, "step": 70 },
    { "epoch": 0.72, "grad_norm": 5.171675205230713, "learning_rate": 2e-05, "loss": 1.377, "step": 72 },
    { "epoch": 0.74, "grad_norm": 4.2659010887146, "learning_rate": 2e-05, "loss": 0.9467, "step": 74 },
    { "epoch": 0.76, "grad_norm": 0.9447065591812134, "learning_rate": 2e-05, "loss": 0.3376, "step": 76 },
    { "epoch": 0.78, "grad_norm": 4.128762245178223, "learning_rate": 2e-05, "loss": 1.0416, "step": 78 },
    { "epoch": 0.8, "grad_norm": 1.5198454856872559, "learning_rate": 2e-05, "loss": 0.495, "step": 80 },
    { "epoch": 0.82, "grad_norm": 1.582706093788147, "learning_rate": 2e-05, "loss": 1.2556, "step": 82 },
    { "epoch": 0.84, "grad_norm": 2.893664836883545, "learning_rate": 2e-05, "loss": 0.3995, "step": 84 },
    { "epoch": 0.86, "grad_norm": 2.9730072021484375, "learning_rate": 2e-05, "loss": 1.1895, "step": 86 },
    { "epoch": 0.88, "grad_norm": 1.6474361419677734, "learning_rate": 2e-05, "loss": 2.0776,
|
317 |
+
"step": 88
|
318 |
+
},
|
319 |
+
{
|
320 |
+
"epoch": 0.9,
|
321 |
+
"grad_norm": 5.00705623626709,
|
322 |
+
"learning_rate": 2e-05,
|
323 |
+
"loss": 1.5599,
|
324 |
+
"step": 90
|
325 |
+
},
|
326 |
+
{
|
327 |
+
"epoch": 0.92,
|
328 |
+
"grad_norm": 2.9408862590789795,
|
329 |
+
"learning_rate": 2e-05,
|
330 |
+
"loss": 0.8146,
|
331 |
+
"step": 92
|
332 |
+
},
|
333 |
+
{
|
334 |
+
"epoch": 0.94,
|
335 |
+
"grad_norm": 2.137223958969116,
|
336 |
+
"learning_rate": 2e-05,
|
337 |
+
"loss": 1.1117,
|
338 |
+
"step": 94
|
339 |
+
},
|
340 |
+
{
|
341 |
+
"epoch": 0.96,
|
342 |
+
"grad_norm": 3.60581636428833,
|
343 |
+
"learning_rate": 2e-05,
|
344 |
+
"loss": 1.0244,
|
345 |
+
"step": 96
|
346 |
+
},
|
347 |
+
{
|
348 |
+
"epoch": 0.98,
|
349 |
+
"grad_norm": 2.505368947982788,
|
350 |
+
"learning_rate": 2e-05,
|
351 |
+
"loss": 0.389,
|
352 |
+
"step": 98
|
353 |
+
},
|
354 |
+
{
|
355 |
+
"epoch": 1.0,
|
356 |
+
"grad_norm": 2.0294015407562256,
|
357 |
+
"learning_rate": 2e-05,
|
358 |
+
"loss": 0.345,
|
359 |
+
"step": 100
|
360 |
+
},
|
361 |
+
{
|
362 |
+
"epoch": 1.0,
|
363 |
+
"step": 100,
|
364 |
+
"total_flos": 5082403345268736.0,
|
365 |
+
"train_loss": 0.9539071077108383,
|
366 |
+
"train_runtime": 160.1565,
|
367 |
+
"train_samples_per_second": 2.498,
|
368 |
+
"train_steps_per_second": 0.624
|
369 |
+
}
|
370 |
+
],
|
371 |
+
"logging_steps": 2,
|
372 |
+
"max_steps": 100,
|
373 |
+
"num_input_tokens_seen": 0,
|
374 |
+
"num_train_epochs": 1,
|
375 |
+
"save_steps": 500,
|
376 |
+
"stateful_callbacks": {
|
377 |
+
"TrainerControl": {
|
378 |
+
"args": {
|
379 |
+
"should_epoch_stop": false,
|
380 |
+
"should_evaluate": false,
|
381 |
+
"should_log": false,
|
382 |
+
"should_save": false,
|
383 |
+
"should_training_stop": false
|
384 |
+
},
|
385 |
+
"attributes": {}
|
386 |
+
}
|
387 |
+
},
|
388 |
+
"total_flos": 5082403345268736.0,
|
389 |
+
"train_batch_size": 1,
|
390 |
+
"trial_name": null,
|
391 |
+
"trial_params": null
|
392 |
+
}
|
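The trainer_state.json files in this upload follow the standard Hugging Face Trainer layout: a log_history list of per-step records (epoch, grad_norm, learning_rate, loss, step) closed by a single summary record. Below is a minimal sketch of how one of these logs could be read back and summarized; it assumes the repository has been downloaded locally and uses 4_trainer_state.json from this folder purely as an example path.

```python
import json
from pathlib import Path

# Example path within a local copy of this upload (adjust to your download location).
state_path = Path(
    "client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2"
    "/4_trainer_state.json"
)

state = json.loads(state_path.read_text())

# Per-step records carry a "loss" key; the trailing summary record does not.
step_records = [e for e in state["log_history"] if "loss" in e]
losses = [e["loss"] for e in step_records]

print(f"logged {len(losses)} points up to step {step_records[-1]['step']}")
print(f"mean logged loss: {sum(losses) / len(losses):.4f}")
print(f"reported train_loss: {state['log_history'][-1]['train_loss']:.4f}")
```

The mean of the logged points will differ slightly from the reported train_loss, since the Trainer logs only every logging_steps (2) steps while train_loss averages over all steps.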
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/4_client_model_round10.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e5c4705c320eddc8a7424fda148c8b02e628026e27bcf1434e4f0c6d74e5d138
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/4_client_model_round12.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4a9165cbc5b90d7e2f92b1037f052e1af20ee867c99dae80afc8c384c2a838a3
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/4_client_model_round15.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8d47fa91dd65b087cc768981cdd66f10cf523ee2fca6f9d6215d3fddc164361a
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/4_client_model_round17.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ad8799bd379e86ab9d60e4f9d5cc1dad0d3a7db5f0cc8db96502ae0c0cc5153c
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/4_client_model_round2.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bf20522100437eb2f78d58fd3a6f25ec8a70fff9f2a2655f8f213a2b8ef52045
+size 389170122
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/4_client_model_round20.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e8f1850bc34be260602927eced981c1f91bb98c31eaf3c265e216c4d225d3ea5
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/4_client_model_round5.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:70b28c6aaf6947b5e7f00736d094de8ae17ffa1a505d1c0450dc8bc825e6dae9
+size 389170122
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/4_client_model_round7.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7141336f1173475c9692c2668fcdbf5a26bad47c86d62f45b5794f9e3d4f0f38
+size 389170122
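The *_client_model_round*.pth files above are stored as Git LFS objects of roughly 389 MB each, one per client and per saved round. A minimal sketch of how such client states could be combined FedAvg-style is shown below; it assumes each file holds a plain PyTorch state_dict saved with torch.save, which is not visible from the LFS pointers themselves, so treat it as illustrative only.

```python
import torch

# Grounded in the file names of this upload; the averaging code itself is an assumption.
PREFIX = "client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2"
CLIENT_IDS = [0, 1, 2, 3, 4, 5]   # client indices present in this folder
ROUND = 20                        # e.g. the last saved round

def load_client(cid: int, rnd: int):
    # Assumes the .pth is a state_dict; map to CPU so no GPU is required.
    return torch.load(f"{PREFIX}/{cid}_client_model_round{rnd}.pth", map_location="cpu")

states = [load_client(cid, ROUND) for cid in CLIENT_IDS]

# Uniform FedAvg: average every parameter tensor across clients.
# A real implementation would also handle non-float buffers and client weighting.
avg = {
    key: torch.stack([s[key].float() for s in states]).mean(dim=0)
    for key in states[0]
}

# `avg` can then be loaded into a matching model via model.load_state_dict(avg).
```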
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/4_trainer_state.json
ADDED
@@ -0,0 +1,392 @@
+{
+  "best_metric": null,
+  "best_model_checkpoint": null,
+  "epoch": 1.0,
+  "eval_steps": 500,
+  "global_step": 100,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    { "epoch": 0.02, "grad_norm": 1.7694361209869385, "learning_rate": 2e-05, "loss": 0.6797, "step": 2 },
+    { "epoch": 0.04, "grad_norm": 1.712048053741455, "learning_rate": 2e-05, "loss": 0.3863, "step": 4 },
+    { "epoch": 0.06, "grad_norm": 2.2159461975097656, "learning_rate": 2e-05, "loss": 0.8038, "step": 6 },
+    { "epoch": 0.08, "grad_norm": 3.892475128173828, "learning_rate": 2e-05, "loss": 2.343, "step": 8 },
+    { "epoch": 0.1, "grad_norm": 2.8474981784820557, "learning_rate": 2e-05, "loss": 0.7891, "step": 10 },
+    { "epoch": 0.12, "grad_norm": 3.7567367553710938, "learning_rate": 2e-05, "loss": 1.2256, "step": 12 },
+    { "epoch": 0.14, "grad_norm": 3.1084630489349365, "learning_rate": 2e-05, "loss": 0.5378, "step": 14 },
+    { "epoch": 0.16, "grad_norm": 2.597458600997925, "learning_rate": 2e-05, "loss": 1.1296, "step": 16 },
+    { "epoch": 0.18, "grad_norm": 2.7688543796539307, "learning_rate": 2e-05, "loss": 1.6792, "step": 18 },
+    { "epoch": 0.2, "grad_norm": 2.0053212642669678, "learning_rate": 2e-05, "loss": 0.8291, "step": 20 },
+    { "epoch": 0.22, "grad_norm": 2.2323999404907227, "learning_rate": 2e-05, "loss": 0.7659, "step": 22 },
+    { "epoch": 0.24, "grad_norm": 0.9668832421302795, "learning_rate": 2e-05, "loss": 0.4112, "step": 24 },
+    { "epoch": 0.26, "grad_norm": 2.050217628479004, "learning_rate": 2e-05, "loss": 0.5468, "step": 26 },
+    { "epoch": 0.28, "grad_norm": 2.0268161296844482, "learning_rate": 2e-05, "loss": 0.7263, "step": 28 },
+    { "epoch": 0.3, "grad_norm": 1.6333857774734497, "learning_rate": 2e-05, "loss": 1.1593, "step": 30 },
+    { "epoch": 0.32, "grad_norm": 2.8671631813049316, "learning_rate": 2e-05, "loss": 0.9164, "step": 32 },
+    { "epoch": 0.34, "grad_norm": 2.747903823852539, "learning_rate": 2e-05, "loss": 0.893, "step": 34 },
+    { "epoch": 0.36, "grad_norm": 1.8260470628738403, "learning_rate": 2e-05, "loss": 1.0974, "step": 36 },
+    { "epoch": 0.38, "grad_norm": 1.2476838827133179, "learning_rate": 2e-05, "loss": 0.5784, "step": 38 },
+    { "epoch": 0.4, "grad_norm": 1.1296557188034058, "learning_rate": 2e-05, "loss": 0.5419, "step": 40 },
+    { "epoch": 0.42, "grad_norm": 1.772802710533142, "learning_rate": 2e-05, "loss": 0.7333, "step": 42 },
+    { "epoch": 0.44, "grad_norm": 2.721181631088257, "learning_rate": 2e-05, "loss": 0.7137, "step": 44 },
+    { "epoch": 0.46, "grad_norm": 2.527193069458008, "learning_rate": 2e-05, "loss": 0.5528, "step": 46 },
+    { "epoch": 0.48, "grad_norm": 2.12138032913208, "learning_rate": 2e-05, "loss": 1.0548, "step": 48 },
+    { "epoch": 0.5, "grad_norm": 0.6518544554710388, "learning_rate": 2e-05, "loss": 0.3414, "step": 50 },
+    { "epoch": 0.52, "grad_norm": 4.018988132476807, "learning_rate": 2e-05, "loss": 1.5555, "step": 52 },
+    { "epoch": 0.54, "grad_norm": 1.6130986213684082, "learning_rate": 2e-05, "loss": 0.4019, "step": 54 },
+    { "epoch": 0.56, "grad_norm": 0.33242765069007874, "learning_rate": 2e-05, "loss": 0.2922, "step": 56 },
+    { "epoch": 0.58, "grad_norm": 1.9729520082473755, "learning_rate": 2e-05, "loss": 0.6715, "step": 58 },
+    { "epoch": 0.6, "grad_norm": 0.9380380511283875, "learning_rate": 2e-05, "loss": 0.3047, "step": 60 },
+    { "epoch": 0.62, "grad_norm": 1.6756107807159424, "learning_rate": 2e-05, "loss": 0.7653, "step": 62 },
+    { "epoch": 0.64, "grad_norm": 6.233062744140625, "learning_rate": 2e-05, "loss": 1.116, "step": 64 },
+    { "epoch": 0.66, "grad_norm": 1.7454715967178345, "learning_rate": 2e-05, "loss": 0.2272, "step": 66 },
+    { "epoch": 0.68, "grad_norm": 1.9794038534164429, "learning_rate": 2e-05, "loss": 0.9766, "step": 68 },
+    { "epoch": 0.7, "grad_norm": 2.021594762802124, "learning_rate": 2e-05, "loss": 1.1175, "step": 70 },
+    { "epoch": 0.72, "grad_norm": 2.5316901206970215, "learning_rate": 2e-05, "loss": 0.7159, "step": 72 },
+    { "epoch": 0.74, "grad_norm": 3.6453006267547607, "learning_rate": 2e-05, "loss": 1.1686, "step": 74 },
+    { "epoch": 0.76, "grad_norm": 0.7126204967498779, "learning_rate": 2e-05, "loss": 0.2293, "step": 76 },
+    { "epoch": 0.78, "grad_norm": 1.6401352882385254, "learning_rate": 2e-05, "loss": 0.4406, "step": 78 },
+    { "epoch": 0.8, "grad_norm": 8.485867500305176, "learning_rate": 2e-05, "loss": 1.04, "step": 80 },
+    { "epoch": 0.82, "grad_norm": 1.8455100059509277, "learning_rate": 2e-05, "loss": 0.9729, "step": 82 },
+    { "epoch": 0.84, "grad_norm": 3.618323564529419, "learning_rate": 2e-05, "loss": 0.7559, "step": 84 },
+    { "epoch": 0.86, "grad_norm": 4.153294563293457, "learning_rate": 2e-05, "loss": 0.9314, "step": 86 },
+    { "epoch": 0.88, "grad_norm": 3.3631463050842285, "learning_rate": 2e-05, "loss": 1.1333, "step": 88 },
+    { "epoch": 0.9, "grad_norm": 3.2221667766571045, "learning_rate": 2e-05, "loss": 0.6, "step": 90 },
+    { "epoch": 0.92, "grad_norm": 2.3827831745147705, "learning_rate": 2e-05, "loss": 0.8038, "step": 92 },
+    { "epoch": 0.94, "grad_norm": 1.3622207641601562, "learning_rate": 2e-05, "loss": 0.3635, "step": 94 },
+    { "epoch": 0.96, "grad_norm": 1.6388647556304932, "learning_rate": 2e-05, "loss": 0.7245, "step": 96 },
+    { "epoch": 0.98, "grad_norm": 2.3859128952026367, "learning_rate": 2e-05, "loss": 0.4281, "step": 98 },
+    { "epoch": 1.0, "grad_norm": 3.1130776405334473, "learning_rate": 2e-05, "loss": 0.6673, "step": 100 },
+    { "epoch": 1.0, "step": 100, "total_flos": 5687732361756672.0, "train_loss": 0.7967825222015381, "train_runtime": 161.3161, "train_samples_per_second": 2.48, "train_steps_per_second": 0.62 }
+  ],
+  "logging_steps": 2,
+  "max_steps": 100,
+  "num_input_tokens_seen": 0,
+  "num_train_epochs": 1,
+  "save_steps": 500,
+  "stateful_callbacks": {
+    "TrainerControl": {
+      "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": false, "should_training_stop": false },
+      "attributes": {}
+    }
+  },
+  "total_flos": 5687732361756672.0,
+  "train_batch_size": 1,
+  "trial_name": null,
+  "trial_params": null
+}
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/5_client_model_round10.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7120f59b3e327f46b7701185d0fbe755cc4c3708b15e42aa9340697ccb63820e
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/5_client_model_round12.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3e27fa87cee19c29aa37b2f4d56a73f5194311c97b396574ee0b5c82b1e2a56e
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/5_client_model_round15.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:83f58ccf6396ad783df7ff3e8b5c3d28eefa3b6c37f471ac05e34f0c22d1e05b
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/5_client_model_round17.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0a9e56f3289a41a3ef33f94549b9d9770c5e16eba0b1aa89ba2bf47af5358d83
+size 389170582
client_states_fedavg_saveoptim_lr2e-5_sc310_4tasks_5rounds_fixit100_T0125_decay099_SEED2/5_client_model_round2.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:40c301bd4f0f805d33e15d5d6ef5bca41d790003760a57770a9d0bec8ce5fa6e
+size 389170122