Upload folder using huggingface_hub
This view is limited to 50 files because it contains too many changes.
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/0_client_model_round10.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/0_client_model_round12.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/0_client_model_round15.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/0_client_model_round17.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/0_client_model_round2.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/0_client_model_round20.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/0_client_model_round5.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/0_client_model_round7.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/0_trainer_state.json +392 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/1_client_model_round10.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/1_client_model_round12.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/1_client_model_round15.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/1_client_model_round17.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/1_client_model_round2.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/1_client_model_round20.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/1_client_model_round5.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/1_client_model_round7.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/1_trainer_state.json +392 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/2_client_model_round10.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/2_client_model_round12.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/2_client_model_round15.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/2_client_model_round17.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/2_client_model_round2.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/2_client_model_round20.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/2_client_model_round5.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/2_client_model_round7.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/2_trainer_state.json +392 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/3_client_model_round10.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/3_client_model_round12.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/3_client_model_round15.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/3_client_model_round17.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/3_client_model_round2.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/3_client_model_round20.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/3_client_model_round5.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/3_client_model_round7.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/3_trainer_state.json +392 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/4_client_model_round10.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/4_client_model_round12.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/4_client_model_round15.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/4_client_model_round17.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/4_client_model_round2.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/4_client_model_round20.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/4_client_model_round5.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/4_client_model_round7.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/4_trainer_state.json +392 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/5_client_model_round10.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/5_client_model_round12.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/5_client_model_round15.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/5_client_model_round17.pth +3 -0
- client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/5_client_model_round2.pth +3 -0
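The listing above shows per-client checkpoints saved after selected communication rounds (2, 5, 7, 10, 12, 15, 17, 20) plus one trainer_state.json per client. Below is a minimal sketch of inspecting one of these checkpoints locally; what exactly each .pth holds (a bare model state_dict, or a wrapper dict that also carries optimizer state, as the "saveoptim" tag in the folder name suggests) is an assumption, so the code probes for both cases.

# Minimal sketch: inspect one client checkpoint from this folder.
# Assumption: the .pth was written with torch.save and holds either a plain
# state_dict or a wrapper dict (e.g. model + optimizer state).
import torch

path = ("client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_"
        "fixitr100_T0125_decay099/0_client_model_round10.pth")

ckpt = torch.load(path, map_location="cpu")

if isinstance(ckpt, dict) and not any(torch.is_tensor(v) for v in ckpt.values()):
    # Wrapper dict: list its top-level keys (e.g. model / optimizer entries).
    print("checkpoint keys:", list(ckpt.keys()))
else:
    # Plain state_dict: show a few parameter names and shapes.
    for name, tensor in list(ckpt.items())[:5]:
        print(name, tuple(tensor.shape))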
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/0_client_model_round10.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8d4dbd7b57117ebc0e92f840772e7b8d9326ec963af97ee7c8ed8f5b4ee5a16c
+size 389170582
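Each .pth entry in this commit is tracked with Git LFS, so the diff records only the three-line pointer above (spec version, SHA-256 of the payload, and its size in bytes, roughly 389 MB here); the weights themselves live in LFS storage. A hedged sketch of fetching the real file through huggingface_hub follows; repo_id and repo_type are placeholders, since the hosting repository is not named in this view.

# Sketch: resolve the LFS pointer to the actual checkpoint via huggingface_hub.
# "your-namespace/your-repo" and repo_type are assumptions; substitute the repo
# this commit belongs to. hf_hub_download follows LFS pointers transparently.
from huggingface_hub import hf_hub_download

local_path = hf_hub_download(
    repo_id="your-namespace/your-repo",  # placeholder
    filename="client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_"
             "fixitr100_T0125_decay099/0_client_model_round10.pth",
    repo_type="model",  # or "dataset", depending on the hosting repo
)
print(local_path)  # cached local copy of the ~389 MB checkpoint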
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/0_client_model_round12.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:da9f5258d69f82613d56ddb767feca00d546a6022909992b29e3e4bfa2387dfb
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/0_client_model_round15.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cb29437e912422ee086e5c8e7522fa85e62c7cd132f24649d8c01ae226142003
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/0_client_model_round17.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9ad3cfb989f0199c40ada0bf07b6861730ad35c59050da0ad718530ab4d7bbaa
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/0_client_model_round2.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d2f4127fb143b2ffa86130e606a7d9aa32b52faedb673e200e7ce733eb83b521
+size 389170122
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/0_client_model_round20.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:abc0687c29d3aea2e03a80a0b7b8b996288fbd8cdeb741b7c2bae13fb5d4268d
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/0_client_model_round5.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ad22cdcb5a353bd143900ef12f0b22ba7bbdffab96c7bc1a0a252b43d8e736cf
+size 389170122
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/0_client_model_round7.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f082d8ba919fb52b4c5c6ca1498321a6fe472fbfc9ea29e8eaea4c30e8b2a987
+size 389170122
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/0_trainer_state.json
ADDED
@@ -0,0 +1,392 @@
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.02, "grad_norm": 0.7681789398193359, "learning_rate": 2e-05, "loss": 0.1667, "step": 2},
    {"epoch": 0.04, "grad_norm": 1.9858031272888184, "learning_rate": 2e-05, "loss": 0.1653, "step": 4},
    {"epoch": 0.06, "grad_norm": 7.460695266723633, "learning_rate": 2e-05, "loss": 0.1575, "step": 6},
    {"epoch": 0.08, "grad_norm": 4.774257183074951, "learning_rate": 2e-05, "loss": 0.3906, "step": 8},
    {"epoch": 0.1, "grad_norm": 0.697665274143219, "learning_rate": 2e-05, "loss": 0.0591, "step": 10},
    {"epoch": 0.12, "grad_norm": 2.0891075134277344, "learning_rate": 2e-05, "loss": 0.1472, "step": 12},
    {"epoch": 0.14, "grad_norm": 3.9881222248077393, "learning_rate": 2e-05, "loss": 0.3101, "step": 14},
    {"epoch": 0.16, "grad_norm": 2.5707805156707764, "learning_rate": 2e-05, "loss": 0.1572, "step": 16},
    {"epoch": 0.18, "grad_norm": 4.1817803382873535, "learning_rate": 2e-05, "loss": 0.2539, "step": 18},
    {"epoch": 0.2, "grad_norm": 2.0925190448760986, "learning_rate": 2e-05, "loss": 0.1567, "step": 20},
    {"epoch": 0.22, "grad_norm": 1.5714080333709717, "learning_rate": 2e-05, "loss": 0.1296, "step": 22},
    {"epoch": 0.24, "grad_norm": 0.7355799674987793, "learning_rate": 2e-05, "loss": 0.0603, "step": 24},
    {"epoch": 0.26, "grad_norm": 1.4088470935821533, "learning_rate": 2e-05, "loss": 0.1611, "step": 26},
    {"epoch": 0.28, "grad_norm": 3.11065936088562, "learning_rate": 2e-05, "loss": 0.1589, "step": 28},
    {"epoch": 0.3, "grad_norm": 7.533633708953857, "learning_rate": 2e-05, "loss": 0.2048, "step": 30},
    {"epoch": 0.32, "grad_norm": 2.762478828430176, "learning_rate": 2e-05, "loss": 0.2603, "step": 32},
    {"epoch": 0.34, "grad_norm": 0.6360326409339905, "learning_rate": 2e-05, "loss": 0.073, "step": 34},
    {"epoch": 0.36, "grad_norm": 2.446553945541382, "learning_rate": 2e-05, "loss": 0.1926, "step": 36},
    {"epoch": 0.38, "grad_norm": 4.585399150848389, "learning_rate": 2e-05, "loss": 0.3315, "step": 38},
    {"epoch": 0.4, "grad_norm": 1.1877154111862183, "learning_rate": 2e-05, "loss": 0.2788, "step": 40},
    {"epoch": 0.42, "grad_norm": 3.592831611633301, "learning_rate": 2e-05, "loss": 0.1753, "step": 42},
    {"epoch": 0.44, "grad_norm": 4.399947166442871, "learning_rate": 2e-05, "loss": 0.1438, "step": 44},
    {"epoch": 0.46, "grad_norm": 2.7166435718536377, "learning_rate": 2e-05, "loss": 0.1648, "step": 46},
    {"epoch": 0.48, "grad_norm": 1.6287264823913574, "learning_rate": 2e-05, "loss": 0.0942, "step": 48},
    {"epoch": 0.5, "grad_norm": 3.5515120029449463, "learning_rate": 2e-05, "loss": 0.2102, "step": 50},
    {"epoch": 0.52, "grad_norm": 1.0096092224121094, "learning_rate": 2e-05, "loss": 0.087, "step": 52},
    {"epoch": 0.54, "grad_norm": 9.327820777893066, "learning_rate": 2e-05, "loss": 0.4895, "step": 54},
    {"epoch": 0.56, "grad_norm": 2.359659194946289, "learning_rate": 2e-05, "loss": 0.2271, "step": 56},
    {"epoch": 0.58, "grad_norm": 1.62618088722229, "learning_rate": 2e-05, "loss": 0.0863, "step": 58},
    {"epoch": 0.6, "grad_norm": 5.553459167480469, "learning_rate": 2e-05, "loss": 0.2427, "step": 60},
    {"epoch": 0.62, "grad_norm": 0.9122878313064575, "learning_rate": 2e-05, "loss": 0.0432, "step": 62},
    {"epoch": 0.64, "grad_norm": 2.588026762008667, "learning_rate": 2e-05, "loss": 0.2168, "step": 64},
    {"epoch": 0.66, "grad_norm": 0.6846867203712463, "learning_rate": 2e-05, "loss": 0.0696, "step": 66},
    {"epoch": 0.68, "grad_norm": 1.5343677997589111, "learning_rate": 2e-05, "loss": 0.0789, "step": 68},
    {"epoch": 0.7, "grad_norm": 1.9839800596237183, "learning_rate": 2e-05, "loss": 0.106, "step": 70},
    {"epoch": 0.72, "grad_norm": 2.914919376373291, "learning_rate": 2e-05, "loss": 0.1992, "step": 72},
    {"epoch": 0.74, "grad_norm": 7.914262294769287, "learning_rate": 2e-05, "loss": 0.1443, "step": 74},
    {"epoch": 0.76, "grad_norm": 1.7149121761322021, "learning_rate": 2e-05, "loss": 0.2246, "step": 76},
    {"epoch": 0.78, "grad_norm": 2.287249803543091, "learning_rate": 2e-05, "loss": 0.3584, "step": 78},
    {"epoch": 0.8, "grad_norm": 3.400071620941162, "learning_rate": 2e-05, "loss": 0.4287, "step": 80},
    {"epoch": 0.82, "grad_norm": 1.0164868831634521, "learning_rate": 2e-05, "loss": 0.166, "step": 82},
    {"epoch": 0.84, "grad_norm": 4.102115154266357, "learning_rate": 2e-05, "loss": 0.2234, "step": 84},
    {"epoch": 0.86, "grad_norm": 5.070318222045898, "learning_rate": 2e-05, "loss": 0.23, "step": 86},
    {"epoch": 0.88, "grad_norm": 4.395050525665283, "learning_rate": 2e-05, "loss": 0.3926, "step": 88},
    {"epoch": 0.9, "grad_norm": 1.2536754608154297, "learning_rate": 2e-05, "loss": 0.0894, "step": 90},
    {"epoch": 0.92, "grad_norm": 0.3888511061668396, "learning_rate": 2e-05, "loss": 0.0743, "step": 92},
    {"epoch": 0.94, "grad_norm": 0.8675658106803894, "learning_rate": 2e-05, "loss": 0.0897, "step": 94},
    {"epoch": 0.96, "grad_norm": 1.6503221988677979, "learning_rate": 2e-05, "loss": 0.1399, "step": 96},
    {"epoch": 0.98, "grad_norm": 3.524456739425659, "learning_rate": 2e-05, "loss": 0.2627, "step": 98},
    {"epoch": 1.0, "grad_norm": 4.795688152313232, "learning_rate": 2e-05, "loss": 0.2783, "step": 100},
    {"epoch": 1.0, "step": 100, "total_flos": 1.865191632614195e+16, "train_loss": 0.191044921875, "train_runtime": 303.6413, "train_samples_per_second": 1.317, "train_steps_per_second": 0.329}
  ],
  "logging_steps": 2,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {"should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": false, "should_training_stop": false},
      "attributes": {}
    }
  },
  "total_flos": 1.865191632614195e+16,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}
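Each trainer_state.json in this commit follows the Hugging Face Trainer layout: a log_history array with one record every two optimizer steps (logging_steps = 2) over a 100-step round, followed by aggregate run statistics. A small sketch for pulling the loss curve back out of such a file is given below; the file path is taken from this commit, and the rest is plain standard-library code.

# Sketch: read one client's trainer_state.json and extract its logged loss curve.
import json

path = ("client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_"
        "fixitr100_T0125_decay099/0_trainer_state.json")

with open(path) as f:
    state = json.load(f)

# Per-step records carry "loss"; the final aggregate record does not.
steps = [(rec["step"], rec["loss"]) for rec in state["log_history"] if "loss" in rec]
print(f"{len(steps)} logged points, final loss {steps[-1][1]}")
print("mean train loss reported by the Trainer:",
      state["log_history"][-1]["train_loss"])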
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/1_client_model_round10.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cf1945623050c0fbfa439f0308b407f01c192adfb55deb3d8ece723ddf3073b4
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/1_client_model_round12.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a50beb931d0b17c7f4fecc51a4111e38d003d51b6f524091fb9c128c20d16cf7
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/1_client_model_round15.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4900b6a620fb460d7eebe63c32a1d3aa7021f1a82ce189ece2e75b654f84b387
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/1_client_model_round17.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:67a35103139b2719bf6cd82156594ec03e472045e38a8abacec5730171b5dea7
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/1_client_model_round2.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:525c0ed366ae1187d55a0c12c194b102495917c4e7ca13dc617503c27c2a30a0
+size 389170122
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/1_client_model_round20.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c43ffd9200389e0fb5e68ee38e6c992bbbdd068e4e3147f32fd334c58ee995bc
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/1_client_model_round5.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:82afcf2e9815bc492277eaf9b495d2f112933333b7c813eb4516c0c4f743c580
+size 389170122
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/1_client_model_round7.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9315733ea4fc115f4683a83e8591a02c6aa572df2cb21342457ed0dd7e41408f
+size 389170122
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/1_trainer_state.json
ADDED
@@ -0,0 +1,392 @@
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.02, "grad_norm": 5.162927150726318, "learning_rate": 2e-05, "loss": 0.3672, "step": 2},
    {"epoch": 0.04, "grad_norm": 3.8139963150024414, "learning_rate": 2e-05, "loss": 0.2764, "step": 4},
    {"epoch": 0.06, "grad_norm": 2.4045088291168213, "learning_rate": 2e-05, "loss": 0.2886, "step": 6},
    {"epoch": 0.08, "grad_norm": 3.0921738147735596, "learning_rate": 2e-05, "loss": 0.2959, "step": 8},
    {"epoch": 0.1, "grad_norm": 1.3632135391235352, "learning_rate": 2e-05, "loss": 0.2969, "step": 10},
    {"epoch": 0.12, "grad_norm": 2.172605276107788, "learning_rate": 2e-05, "loss": 0.2148, "step": 12},
    {"epoch": 0.14, "grad_norm": 2.9686121940612793, "learning_rate": 2e-05, "loss": 0.2988, "step": 14},
    {"epoch": 0.16, "grad_norm": 1.4601385593414307, "learning_rate": 2e-05, "loss": 0.2915, "step": 16},
    {"epoch": 0.18, "grad_norm": 1.6114205121994019, "learning_rate": 2e-05, "loss": 0.2109, "step": 18},
    {"epoch": 0.2, "grad_norm": 2.0597972869873047, "learning_rate": 2e-05, "loss": 0.249, "step": 20},
    {"epoch": 0.22, "grad_norm": 2.3526787757873535, "learning_rate": 2e-05, "loss": 0.2544, "step": 22},
    {"epoch": 0.24, "grad_norm": 3.1127853393554688, "learning_rate": 2e-05, "loss": 0.2817, "step": 24},
    {"epoch": 0.26, "grad_norm": 1.1098952293395996, "learning_rate": 2e-05, "loss": 0.1846, "step": 26},
    {"epoch": 0.28, "grad_norm": 2.701572895050049, "learning_rate": 2e-05, "loss": 0.1963, "step": 28},
    {"epoch": 0.3, "grad_norm": 2.284106492996216, "learning_rate": 2e-05, "loss": 0.3916, "step": 30},
    {"epoch": 0.32, "grad_norm": 2.42879056930542, "learning_rate": 2e-05, "loss": 0.2969, "step": 32},
    {"epoch": 0.34, "grad_norm": 1.040755271911621, "learning_rate": 2e-05, "loss": 0.3027, "step": 34},
    {"epoch": 0.36, "grad_norm": 3.8482613563537598, "learning_rate": 2e-05, "loss": 0.3052, "step": 36},
    {"epoch": 0.38, "grad_norm": 3.358405351638794, "learning_rate": 2e-05, "loss": 0.1365, "step": 38},
    {"epoch": 0.4, "grad_norm": 2.249178647994995, "learning_rate": 2e-05, "loss": 0.2383, "step": 40},
    {"epoch": 0.42, "grad_norm": 1.7586663961410522, "learning_rate": 2e-05, "loss": 0.187, "step": 42},
    {"epoch": 0.44, "grad_norm": 1.6459927558898926, "learning_rate": 2e-05, "loss": 0.2646, "step": 44},
    {"epoch": 0.46, "grad_norm": 0.9291881322860718, "learning_rate": 2e-05, "loss": 0.168, "step": 46},
    {"epoch": 0.48, "grad_norm": 1.067496418952942, "learning_rate": 2e-05, "loss": 0.1135, "step": 48},
    {"epoch": 0.5, "grad_norm": 3.1252028942108154, "learning_rate": 2e-05, "loss": 0.3447, "step": 50},
    {"epoch": 0.52, "grad_norm": 4.299633979797363, "learning_rate": 2e-05, "loss": 0.4619, "step": 52},
    {"epoch": 0.54, "grad_norm": 4.607396602630615, "learning_rate": 2e-05, "loss": 0.2954, "step": 54},
    {"epoch": 0.56, "grad_norm": 1.2473145723342896, "learning_rate": 2e-05, "loss": 0.1025, "step": 56},
    {"epoch": 0.58, "grad_norm": 2.085881233215332, "learning_rate": 2e-05, "loss": 0.2085, "step": 58},
    {"epoch": 0.6, "grad_norm": 2.942441701889038, "learning_rate": 2e-05, "loss": 0.209, "step": 60},
    {"epoch": 0.62, "grad_norm": 3.026491403579712, "learning_rate": 2e-05, "loss": 0.4785, "step": 62},
    {"epoch": 0.64, "grad_norm": 2.2812161445617676, "learning_rate": 2e-05, "loss": 0.1526, "step": 64},
    {"epoch": 0.66, "grad_norm": 1.2541882991790771, "learning_rate": 2e-05, "loss": 0.4092, "step": 66},
    {"epoch": 0.68, "grad_norm": 3.7665891647338867, "learning_rate": 2e-05, "loss": 0.3896, "step": 68},
    {"epoch": 0.7, "grad_norm": 1.1083252429962158, "learning_rate": 2e-05, "loss": 0.2983, "step": 70},
    {"epoch": 0.72, "grad_norm": 1.9806040525436401, "learning_rate": 2e-05, "loss": 0.2466, "step": 72},
    {"epoch": 0.74, "grad_norm": 0.943515419960022, "learning_rate": 2e-05, "loss": 0.2144, "step": 74},
    {"epoch": 0.76, "grad_norm": 2.462261199951172, "learning_rate": 2e-05, "loss": 0.2729, "step": 76},
    {"epoch": 0.78, "grad_norm": 3.1030101776123047, "learning_rate": 2e-05, "loss": 0.2119, "step": 78},
    {"epoch": 0.8, "grad_norm": 6.973886013031006, "learning_rate": 2e-05, "loss": 0.3633, "step": 80},
    {"epoch": 0.82, "grad_norm": 0.9282856583595276, "learning_rate": 2e-05, "loss": 0.1824, "step": 82},
    {"epoch": 0.84, "grad_norm": 0.6491354703903198, "learning_rate": 2e-05, "loss": 0.2366, "step": 84},
    {"epoch": 0.86, "grad_norm": 3.987030267715454, "learning_rate": 2e-05, "loss": 0.2979, "step": 86},
    {"epoch": 0.88, "grad_norm": 1.5676429271697998, "learning_rate": 2e-05, "loss": 0.2666, "step": 88},
    {"epoch": 0.9, "grad_norm": 1.862716555595398, "learning_rate": 2e-05, "loss": 0.2744, "step": 90},
    {"epoch": 0.92, "grad_norm": 1.8054755926132202, "learning_rate": 2e-05, "loss": 0.2358, "step": 92},
    {"epoch": 0.94, "grad_norm": 2.7789947986602783, "learning_rate": 2e-05, "loss": 0.2129, "step": 94},
    {"epoch": 0.96, "grad_norm": 2.808908224105835, "learning_rate": 2e-05, "loss": 0.1958, "step": 96},
    {"epoch": 0.98, "grad_norm": 0.9118070602416992, "learning_rate": 2e-05, "loss": 0.2136, "step": 98},
    {"epoch": 1.0, "grad_norm": 1.6127201318740845, "learning_rate": 2e-05, "loss": 0.4141, "step": 100},
    {"epoch": 1.0, "step": 100, "total_flos": 1.8301597519970304e+16, "train_loss": 0.266015625, "train_runtime": 299.9026, "train_samples_per_second": 1.334, "train_steps_per_second": 0.333}
  ],
  "logging_steps": 2,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {"should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": false, "should_training_stop": false},
      "attributes": {}
    }
  },
  "total_flos": 1.8301597519970304e+16,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/2_client_model_round10.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:861a370e45b2f9f01d78fc016f5bad3765cca6c978c9af9f30a409756bc51e97
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/2_client_model_round12.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a80e9571fc8680486ede0be802ca180741169727a4799492afe1064e86697ce6
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/2_client_model_round15.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7ed788184c46a51da9d846cb49e541da4a2e3303c5ad0cee622ee01f3f188086
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/2_client_model_round17.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2d00e2ab2a81a40f620636edb349d89c1e7ef19e79aa22e78e9100ffffc93ca8
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/2_client_model_round2.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:be877f0fc2f11753b74dbcea075c70433b27c2d4d4b1a5154dff3c1cb51d1cb0
+size 389170122
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/2_client_model_round20.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1f0172c9ac94f65a75ec956194f5bce1600b80ab128bf0da604a63c6213e9f3d
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/2_client_model_round5.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e10edfea7280d36f28e2b9a0afe06770f40903159e887924c4e6121a7b1969bc
+size 389170122
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/2_client_model_round7.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8b3c446267413329156d5148c0ba8a8d04c49d5c62a923ab6551c659c2d06ff7
+size 389170122
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/2_trainer_state.json
ADDED
@@ -0,0 +1,392 @@
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.02, "grad_norm": 5.92473030090332, "learning_rate": 2e-05, "loss": 0.2788, "step": 2},
    {"epoch": 0.04, "grad_norm": 1.1622978448867798, "learning_rate": 2e-05, "loss": 0.082, "step": 4},
    {"epoch": 0.06, "grad_norm": 2.6461238861083984, "learning_rate": 2e-05, "loss": 0.1726, "step": 6},
    {"epoch": 0.08, "grad_norm": 2.0796098709106445, "learning_rate": 2e-05, "loss": 0.0748, "step": 8},
    {"epoch": 0.1, "grad_norm": 2.907897710800171, "learning_rate": 2e-05, "loss": 0.1819, "step": 10},
    {"epoch": 0.12, "grad_norm": 11.616080284118652, "learning_rate": 2e-05, "loss": 0.2549, "step": 12},
    {"epoch": 0.14, "grad_norm": 6.870760440826416, "learning_rate": 2e-05, "loss": 0.5967, "step": 14},
    {"epoch": 0.16, "grad_norm": 5.469508171081543, "learning_rate": 2e-05, "loss": 0.3545, "step": 16},
    {"epoch": 0.18, "grad_norm": 3.4311599731445312, "learning_rate": 2e-05, "loss": 0.3042, "step": 18},
    {"epoch": 0.2, "grad_norm": 1.9978313446044922, "learning_rate": 2e-05, "loss": 0.1797, "step": 20},
    {"epoch": 0.22, "grad_norm": 2.14152193069458, "learning_rate": 2e-05, "loss": 0.2578, "step": 22},
    {"epoch": 0.24, "grad_norm": 1.08518648147583, "learning_rate": 2e-05, "loss": 0.1743, "step": 24},
    {"epoch": 0.26, "grad_norm": 2.6409809589385986, "learning_rate": 2e-05, "loss": 0.2168, "step": 26},
    {"epoch": 0.28, "grad_norm": 5.836951732635498, "learning_rate": 2e-05, "loss": 0.2664, "step": 28},
    {"epoch": 0.3, "grad_norm": 0.8253288865089417, "learning_rate": 2e-05, "loss": 0.193, "step": 30},
    {"epoch": 0.32, "grad_norm": 3.445699691772461, "learning_rate": 2e-05, "loss": 0.1362, "step": 32},
    {"epoch": 0.34, "grad_norm": 1.3368934392929077, "learning_rate": 2e-05, "loss": 0.1948, "step": 34},
    {"epoch": 0.36, "grad_norm": 7.334422588348389, "learning_rate": 2e-05, "loss": 0.27, "step": 36},
    {"epoch": 0.38, "grad_norm": 1.5522675514221191, "learning_rate": 2e-05, "loss": 0.1226, "step": 38},
    {"epoch": 0.4, "grad_norm": 3.0696229934692383, "learning_rate": 2e-05, "loss": 0.1794, "step": 40},
    {"epoch": 0.42, "grad_norm": 2.5367555618286133, "learning_rate": 2e-05, "loss": 0.2666, "step": 42},
    {"epoch": 0.44, "grad_norm": 1.771532654762268, "learning_rate": 2e-05, "loss": 0.1055, "step": 44},
    {"epoch": 0.46, "grad_norm": 1.6121385097503662, "learning_rate": 2e-05, "loss": 0.1382, "step": 46},
    {"epoch": 0.48, "grad_norm": 0.840958297252655, "learning_rate": 2e-05, "loss": 0.1956, "step": 48},
    {"epoch": 0.5, "grad_norm": 9.105291366577148, "learning_rate": 2e-05, "loss": 0.3271, "step": 50},
    {"epoch": 0.52, "grad_norm": 2.591594934463501, "learning_rate": 2e-05, "loss": 0.2515, "step": 52},
    {"epoch": 0.54, "grad_norm": 2.4830551147460938, "learning_rate": 2e-05, "loss": 0.2068, "step": 54},
    {"epoch": 0.56, "grad_norm": 0.9077150225639343, "learning_rate": 2e-05, "loss": 0.1003, "step": 56},
    {"epoch": 0.58, "grad_norm": 2.9782874584198, "learning_rate": 2e-05, "loss": 0.1656, "step": 58},
    {"epoch": 0.6, "grad_norm": 1.8825637102127075, "learning_rate": 2e-05, "loss": 0.1685, "step": 60},
    {"epoch": 0.62, "grad_norm": 7.019435405731201, "learning_rate": 2e-05, "loss": 0.3867, "step": 62},
    {"epoch": 0.64, "grad_norm": 1.12417733669281, "learning_rate": 2e-05, "loss": 0.21, "step": 64},
    {"epoch": 0.66, "grad_norm": 3.1205978393554688, "learning_rate": 2e-05, "loss": 0.2744, "step": 66},
    {"epoch": 0.68, "grad_norm": 2.8004539012908936, "learning_rate": 2e-05, "loss": 0.1589, "step": 68},
    {"epoch": 0.7, "grad_norm": 2.7644095420837402, "learning_rate": 2e-05, "loss": 0.2217, "step": 70},
    {"epoch": 0.72, "grad_norm": 1.4835090637207031, "learning_rate": 2e-05, "loss": 0.1443, "step": 72},
    {"epoch": 0.74, "grad_norm": 3.9295995235443115, "learning_rate": 2e-05, "loss": 0.3276, "step": 74},
    {"epoch": 0.76, "grad_norm": 4.166787624359131, "learning_rate": 2e-05, "loss": 0.373, "step": 76},
    {"epoch": 0.78, "grad_norm": 1.4152839183807373, "learning_rate": 2e-05, "loss": 0.0529, "step": 78},
    {"epoch": 0.8, "grad_norm": 1.817052960395813, "learning_rate": 2e-05, "loss": 0.1982, "step": 80},
    {"epoch": 0.82, "grad_norm": 6.036396503448486, "learning_rate": 2e-05, "loss": 0.2522, "step": 82},
    {"epoch": 0.84, "grad_norm": 1.7961066961288452, "learning_rate": 2e-05, "loss": 0.104, "step": 84},
    {"epoch": 0.86, "grad_norm": 2.2944209575653076, "learning_rate": 2e-05, "loss": 0.1484, "step": 86},
    {"epoch": 0.88, "grad_norm": 6.894327163696289, "learning_rate": 2e-05, "loss": 0.3477, "step": 88},
    {"epoch": 0.9, "grad_norm": 0.9235769510269165, "learning_rate": 2e-05, "loss": 0.0969, "step": 90},
    {"epoch": 0.92, "grad_norm": 1.7430696487426758, "learning_rate": 2e-05, "loss": 0.1968, "step": 92},
    {"epoch": 0.94, "grad_norm": 5.987889289855957, "learning_rate": 2e-05, "loss": 0.5039, "step": 94},
    {"epoch": 0.96, "grad_norm": 8.167346000671387, "learning_rate": 2e-05, "loss": 0.3647, "step": 96},
    {"epoch": 0.98, "grad_norm": 7.413778781890869, "learning_rate": 2e-05, "loss": 0.3555, "step": 98},
    {"epoch": 1.0, "grad_norm": 6.16605806350708, "learning_rate": 2e-05, "loss": 0.3892, "step": 100},
    {"epoch": 1.0, "step": 100, "total_flos": 2.497659436793856e+16, "train_loss": 0.23048248291015624, "train_runtime": 395.796, "train_samples_per_second": 1.011, "train_steps_per_second": 0.253}
  ],
  "logging_steps": 2,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {"should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": false, "should_training_stop": false},
      "attributes": {}
    }
  },
  "total_flos": 2.497659436793856e+16,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/3_client_model_round10.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bf003a0715646d51d7d8dd69d21438f861fcc6bf462e5aa8b8d7752680923ad4
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/3_client_model_round12.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3c20bbb50c4867bd9ab7a8e7ba989ef137b20f09819a143823d4696a7bd86314
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/3_client_model_round15.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a24d0cd89ede47f27ec36b49824613b974b1ad1d6f2ce20319ec7de50d1912d3
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/3_client_model_round17.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:961c2839e9016d39e6c46226ca8319aff4d60ffa6c4c139c293d6bb9555fd5a3
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/3_client_model_round2.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:55a9ba8f67f33396c7161c03dd5de4bae4441d29685ab799357ff0caffb33145
+size 389170122
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/3_client_model_round20.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a1a294262ae2a18fa9aaaeba2a89bf8e85512f01dc73429613b0ed7525403875
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/3_client_model_round5.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8af1304b56ea2a178b1a623e04cbc466952d93a3685d3735b8f66ca282a1cb2e
+size 389170122
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/3_client_model_round7.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:724fe45e0487e8d17e203794b8410bfaba92fc29067f298af8ed9447274c1f86
+size 389170122
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/3_trainer_state.json
ADDED
@@ -0,0 +1,392 @@
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.02, "grad_norm": 5.450788974761963, "learning_rate": 2e-05, "loss": 0.4023, "step": 2},
    {"epoch": 0.04, "grad_norm": 4.3735480308532715, "learning_rate": 2e-05, "loss": 0.8789, "step": 4},
    {"epoch": 0.06, "grad_norm": 3.179641008377075, "learning_rate": 2e-05, "loss": 0.2146, "step": 6},
    {"epoch": 0.08, "grad_norm": 1.3362727165222168, "learning_rate": 2e-05, "loss": 0.2378, "step": 8},
    {"epoch": 0.1, "grad_norm": 1.5938769578933716, "learning_rate": 2e-05, "loss": 0.2266, "step": 10},
    {"epoch": 0.12, "grad_norm": 0.8245390057563782, "learning_rate": 2e-05, "loss": 0.113, "step": 12},
    {"epoch": 0.14, "grad_norm": 0.9300125241279602, "learning_rate": 2e-05, "loss": 0.1543, "step": 14},
    {"epoch": 0.16, "grad_norm": 0.7108556032180786, "learning_rate": 2e-05, "loss": 0.1973, "step": 16},
    {"epoch": 0.18, "grad_norm": 1.0650441646575928, "learning_rate": 2e-05, "loss": 0.3037, "step": 18},
    {"epoch": 0.2, "grad_norm": 0.7739357352256775, "learning_rate": 2e-05, "loss": 0.1987, "step": 20},
    {"epoch": 0.22, "grad_norm": 3.9206535816192627, "learning_rate": 2e-05, "loss": 0.293, "step": 22},
    {"epoch": 0.24, "grad_norm": 6.544978618621826, "learning_rate": 2e-05, "loss": 0.5303, "step": 24},
    {"epoch": 0.26, "grad_norm": 0.8539941906929016, "learning_rate": 2e-05, "loss": 0.1707, "step": 26},
    {"epoch": 0.28, "grad_norm": 1.50163733959198, "learning_rate": 2e-05, "loss": 0.3721, "step": 28},
    {"epoch": 0.3, "grad_norm": 0.9231382012367249, "learning_rate": 2e-05, "loss": 0.3438, "step": 30},
    {"epoch": 0.32, "grad_norm": 1.4241496324539185, "learning_rate": 2e-05, "loss": 0.2148, "step": 32},
    {"epoch": 0.34, "grad_norm": 2.346113681793213, "learning_rate": 2e-05, "loss": 0.249, "step": 34},
    {"epoch": 0.36, "grad_norm": 1.622445821762085, "learning_rate": 2e-05, "loss": 0.2319, "step": 36},
    {"epoch": 0.38, "grad_norm": 1.527666449546814, "learning_rate": 2e-05, "loss": 0.2397, "step": 38},
    {"epoch": 0.4, "grad_norm": 1.2725470066070557, "learning_rate": 2e-05, "loss": 0.2456, "step": 40},
    {"epoch": 0.42, "grad_norm": 2.2991366386413574, "learning_rate": 2e-05, "loss": 0.2217, "step": 42},
    {"epoch": 0.44, "grad_norm": 2.7729849815368652, "learning_rate": 2e-05, "loss": 0.2349, "step": 44},
    {"epoch": 0.46, "grad_norm": 3.2132370471954346, "learning_rate": 2e-05, "loss": 0.3203, "step": 46},
    {"epoch": 0.48, "grad_norm": 0.9868631958961487, "learning_rate": 2e-05, "loss": 0.1399, "step": 48},
    {"epoch": 0.5, "grad_norm": 1.8213165998458862, "learning_rate": 2e-05, "loss": 0.3296, "step": 50},
    {"epoch": 0.52, "grad_norm": 1.6022188663482666, "learning_rate": 2e-05, "loss": 0.1272, "step": 52},
    {"epoch": 0.54, "grad_norm": 5.657681465148926, "learning_rate": 2e-05, "loss": 0.3325, "step": 54},
    {"epoch": 0.56, "grad_norm": 1.7171556949615479, "learning_rate": 2e-05, "loss": 0.2881, "step": 56},
    {"epoch": 0.58, "grad_norm": 1.9206146001815796, "learning_rate": 2e-05, "loss": 0.2173, "step": 58},
    {"epoch": 0.6, "grad_norm": 1.2901687622070312, "learning_rate": 2e-05, "loss": 0.1465, "step": 60},
    {"epoch": 0.62, "grad_norm": 1.38545823097229, "learning_rate": 2e-05, "loss": 0.1621, "step": 62},
    {"epoch": 0.64, "grad_norm": 2.096327543258667, "learning_rate": 2e-05, "loss": 0.2964, "step": 64},
    {"epoch": 0.66, "grad_norm": 1.434394359588623, "learning_rate": 2e-05, "loss": 0.3135, "step": 66},
    {"epoch": 0.68, "grad_norm": 2.6365418434143066, "learning_rate": 2e-05, "loss": 0.085, "step": 68},
    {"epoch": 0.7, "grad_norm": 0.9778931736946106, "learning_rate": 2e-05, "loss": 0.1101, "step": 70},
    {"epoch": 0.72, "grad_norm": 2.0804357528686523, "learning_rate": 2e-05, "loss": 0.1011, "step": 72},
    {"epoch": 0.74, "grad_norm": 8.342849731445312, "learning_rate": 2e-05, "loss": 0.0988, "step": 74},
    {"epoch": 0.76, "grad_norm": 4.158697128295898, "learning_rate": 2e-05, "loss": 0.231, "step": 76},
    {"epoch": 0.78, "grad_norm": 0.9794576168060303, "learning_rate": 2e-05, "loss": 0.3228, "step": 78},
    {"epoch": 0.8, "grad_norm": 1.1897964477539062, "learning_rate": 2e-05, "loss": 0.0883, "step": 80},
    {"epoch": 0.82, "grad_norm": 2.8258769512176514, "learning_rate": 2e-05, "loss": 0.2031, "step": 82},
    {"epoch": 0.84, "grad_norm": 5.6181559562683105, "learning_rate": 2e-05, "loss": 0.3669, "step": 84},
    {"epoch": 0.86, "grad_norm": 1.9324073791503906, "learning_rate": 2e-05, "loss": 0.2578, "step": 86},
    {"epoch": 0.88, "grad_norm": 5.12706995010376, "learning_rate": 2e-05, "loss": 0.3032, "step": 88},
    {"epoch": 0.9, "grad_norm": 6.049209117889404, "learning_rate": 2e-05, "loss": 0.3042, "step": 90},
},
|
326 |
+
{
|
327 |
+
"epoch": 0.92,
|
328 |
+
"grad_norm": 2.761207342147827,
|
329 |
+
"learning_rate": 2e-05,
|
330 |
+
"loss": 0.2314,
|
331 |
+
"step": 92
|
332 |
+
},
|
333 |
+
{
|
334 |
+
"epoch": 0.94,
|
335 |
+
"grad_norm": 0.6125823259353638,
|
336 |
+
"learning_rate": 2e-05,
|
337 |
+
"loss": 0.1582,
|
338 |
+
"step": 94
|
339 |
+
},
|
340 |
+
{
|
341 |
+
"epoch": 0.96,
|
342 |
+
"grad_norm": 2.5384273529052734,
|
343 |
+
"learning_rate": 2e-05,
|
344 |
+
"loss": 0.1904,
|
345 |
+
"step": 96
|
346 |
+
},
|
347 |
+
{
|
348 |
+
"epoch": 0.98,
|
349 |
+
"grad_norm": 2.4318525791168213,
|
350 |
+
"learning_rate": 2e-05,
|
351 |
+
"loss": 0.0905,
|
352 |
+
"step": 98
|
353 |
+
},
|
354 |
+
{
|
355 |
+
"epoch": 1.0,
|
356 |
+
"grad_norm": 0.46588173508644104,
|
357 |
+
"learning_rate": 2e-05,
|
358 |
+
"loss": 0.064,
|
359 |
+
"step": 100
|
360 |
+
},
|
361 |
+
{
|
362 |
+
"epoch": 1.0,
|
363 |
+
"step": 100,
|
364 |
+
"total_flos": 2.7011800205623296e+16,
|
365 |
+
"train_loss": 0.24309326171875,
|
366 |
+
"train_runtime": 440.2463,
|
367 |
+
"train_samples_per_second": 0.909,
|
368 |
+
"train_steps_per_second": 0.227
|
369 |
+
}
|
370 |
+
],
|
371 |
+
"logging_steps": 2,
|
372 |
+
"max_steps": 100,
|
373 |
+
"num_input_tokens_seen": 0,
|
374 |
+
"num_train_epochs": 1,
|
375 |
+
"save_steps": 500,
|
376 |
+
"stateful_callbacks": {
|
377 |
+
"TrainerControl": {
|
378 |
+
"args": {
|
379 |
+
"should_epoch_stop": false,
|
380 |
+
"should_evaluate": false,
|
381 |
+
"should_log": false,
|
382 |
+
"should_save": false,
|
383 |
+
"should_training_stop": false
|
384 |
+
},
|
385 |
+
"attributes": {}
|
386 |
+
}
|
387 |
+
},
|
388 |
+
"total_flos": 2.7011800205623296e+16,
|
389 |
+
"train_batch_size": 4,
|
390 |
+
"trial_name": null,
|
391 |
+
"trial_params": null
|
392 |
+
}
|
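Note: the trainer_state.json files in this upload follow the standard Hugging Face Trainer layout, a log_history list of per-logging-step records followed by a summary entry and the top-level run settings. Below is a minimal sketch of inspecting one of them after cloning the repo; the local path and the client index are assumptions for illustration, not part of the upload itself.

import json

# Hypothetical local path after cloning; adjust the client index (0-5) as needed.
STATE_PATH = "client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/4_trainer_state.json"

with open(STATE_PATH) as f:
    state = json.load(f)

# Entries with a "loss" key are the per-logging-step records (every 2 steps here);
# the final entry carries the run summary (train_loss, train_runtime, ...).
losses = [e["loss"] for e in state["log_history"] if "loss" in e]
summary = state["log_history"][-1]
print(len(losses), "logged losses; summary train_loss =", summary["train_loss"])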
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/4_client_model_round10.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4179fd368b67028dd973f7abd88a2d2a03025f61ef13e87a1e8cbd6ce4fd382f
+size 389170582
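The *.pth entries in this diff are Git LFS pointer files rather than the checkpoints themselves: each records only the pointer spec version, the SHA-256 object id, and the blob size (about 389 MB per client model), while the actual weights are fetched by LFS at checkout. A small sketch of reading such a pointer when it is checked out as a plain text file; the helper name and example path are illustrative only.

from pathlib import Path

def read_lfs_pointer(path):
    # A Git LFS pointer is three "key value" lines: version, oid, size.
    fields = dict(line.split(" ", 1) for line in Path(path).read_text().splitlines() if line.strip())
    return {"version": fields["version"], "oid": fields["oid"], "size_bytes": int(fields["size"])}

# Example (hypothetical local path):
# info = read_lfs_pointer("client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/4_client_model_round10.pth")
# print(info["oid"], info["size_bytes"])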
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/4_client_model_round12.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1ff94ef4c83754c6ad4c8211c740a6727c24bc4aef38665e83b9de6338c9ea17
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/4_client_model_round15.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3642f0c9dce5029bac42c5a27aff3ea37fd82a4c917ab8b4e7dcd5064a1e619c
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/4_client_model_round17.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8a0adbad2f5a8aa264d7936fdaff6b05707fe37b556aacd726bccddaca735b6d
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/4_client_model_round2.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b8d8d8d43b5ceb413dcecdc8d13b620986f13fb0c744be8a6a62135b83a4f30
+size 389170122
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/4_client_model_round20.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:729f2bc8bd697b0e8bd9dec495b719b3075d07b956bd6345027f7bc2031ce56f
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/4_client_model_round5.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b1b40d85f0086ea0f070724cf28d56f500cfd32f8102720ced8eef38895822c4
+size 389170122
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/4_client_model_round7.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5b179b032ff4cd4a2bcd2f9207e0ce04655651fdf93d07cf79590154002a5bd7
+size 389170122
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/4_trainer_state.json
ADDED
@@ -0,0 +1,392 @@
+{
+"best_metric": null,
+"best_model_checkpoint": null,
+"epoch": 1.0,
+"eval_steps": 500,
+"global_step": 100,
+"is_hyper_param_search": false,
+"is_local_process_zero": true,
+"is_world_process_zero": true,
+"log_history": [
+{
+"epoch": 0.02,
+"grad_norm": 1.2775617837905884,
+"learning_rate": 2e-05,
+"loss": 0.0959,
+"step": 2
+},
+{
+"epoch": 0.04,
+"grad_norm": 0.6514262557029724,
+"learning_rate": 2e-05,
+"loss": 0.0951,
+"step": 4
+},
+{
+"epoch": 0.06,
+"grad_norm": 1.4927728176116943,
+"learning_rate": 2e-05,
+"loss": 0.1514,
+"step": 6
+},
+{
+"epoch": 0.08,
+"grad_norm": 0.7910555601119995,
+"learning_rate": 2e-05,
+"loss": 0.0729,
+"step": 8
+},
+{
+"epoch": 0.1,
+"grad_norm": 3.8304953575134277,
+"learning_rate": 2e-05,
+"loss": 0.2334,
+"step": 10
+},
+{
+"epoch": 0.12,
+"grad_norm": 1.3381283283233643,
+"learning_rate": 2e-05,
+"loss": 0.2092,
+"step": 12
+},
+{
+"epoch": 0.14,
+"grad_norm": 0.2959333658218384,
+"learning_rate": 2e-05,
+"loss": 0.0399,
+"step": 14
+},
+{
+"epoch": 0.16,
+"grad_norm": 3.57353138923645,
+"learning_rate": 2e-05,
+"loss": 0.3037,
+"step": 16
+},
+{
+"epoch": 0.18,
+"grad_norm": 0.645243763923645,
+"learning_rate": 2e-05,
+"loss": 0.1586,
+"step": 18
+},
+{
+"epoch": 0.2,
+"grad_norm": 1.9823670387268066,
+"learning_rate": 2e-05,
+"loss": 0.0765,
+"step": 20
+},
+{
+"epoch": 0.22,
+"grad_norm": 3.530198812484741,
+"learning_rate": 2e-05,
+"loss": 0.1733,
+"step": 22
+},
+{
+"epoch": 0.24,
+"grad_norm": 0.9225659966468811,
+"learning_rate": 2e-05,
+"loss": 0.2864,
+"step": 24
+},
+{
+"epoch": 0.26,
+"grad_norm": 5.3893327713012695,
+"learning_rate": 2e-05,
+"loss": 0.8496,
+"step": 26
+},
+{
+"epoch": 0.28,
+"grad_norm": 3.3154380321502686,
+"learning_rate": 2e-05,
+"loss": 0.4512,
+"step": 28
+},
+{
+"epoch": 0.3,
+"grad_norm": 3.843571186065674,
+"learning_rate": 2e-05,
+"loss": 0.3457,
+"step": 30
+},
+{
+"epoch": 0.32,
+"grad_norm": 2.980067729949951,
+"learning_rate": 2e-05,
+"loss": 0.2661,
+"step": 32
+},
+{
+"epoch": 0.34,
+"grad_norm": 2.548189401626587,
+"learning_rate": 2e-05,
+"loss": 0.2275,
+"step": 34
+},
+{
+"epoch": 0.36,
+"grad_norm": 1.518248438835144,
+"learning_rate": 2e-05,
+"loss": 0.3413,
+"step": 36
+},
+{
+"epoch": 0.38,
+"grad_norm": 1.61312735080719,
+"learning_rate": 2e-05,
+"loss": 0.0928,
+"step": 38
+},
+{
+"epoch": 0.4,
+"grad_norm": 4.421812534332275,
+"learning_rate": 2e-05,
+"loss": 0.293,
+"step": 40
+},
+{
+"epoch": 0.42,
+"grad_norm": 0.13254289329051971,
+"learning_rate": 2e-05,
+"loss": 0.0839,
+"step": 42
+},
+{
+"epoch": 0.44,
+"grad_norm": 1.1233917474746704,
+"learning_rate": 2e-05,
+"loss": 0.2109,
+"step": 44
+},
+{
+"epoch": 0.46,
+"grad_norm": 1.286293864250183,
+"learning_rate": 2e-05,
+"loss": 0.2073,
+"step": 46
+},
+{
+"epoch": 0.48,
+"grad_norm": 0.5999717116355896,
+"learning_rate": 2e-05,
+"loss": 0.1538,
+"step": 48
+},
+{
+"epoch": 0.5,
+"grad_norm": 1.8072141408920288,
+"learning_rate": 2e-05,
+"loss": 0.1875,
+"step": 50
+},
+{
+"epoch": 0.52,
+"grad_norm": 1.4119832515716553,
+"learning_rate": 2e-05,
+"loss": 0.3901,
+"step": 52
+},
+{
+"epoch": 0.54,
+"grad_norm": 2.0136523246765137,
+"learning_rate": 2e-05,
+"loss": 0.1553,
+"step": 54
+},
+{
+"epoch": 0.56,
+"grad_norm": 0.5950088500976562,
+"learning_rate": 2e-05,
+"loss": 0.0686,
+"step": 56
+},
+{
+"epoch": 0.58,
+"grad_norm": 1.3284698724746704,
+"learning_rate": 2e-05,
+"loss": 0.2051,
+"step": 58
+},
+{
+"epoch": 0.6,
+"grad_norm": 0.8365779519081116,
+"learning_rate": 2e-05,
+"loss": 0.0698,
+"step": 60
+},
+{
+"epoch": 0.62,
+"grad_norm": 1.1531637907028198,
+"learning_rate": 2e-05,
+"loss": 0.1392,
+"step": 62
+},
+{
+"epoch": 0.64,
+"grad_norm": 5.157987117767334,
+"learning_rate": 2e-05,
+"loss": 0.3096,
+"step": 64
+},
+{
+"epoch": 0.66,
+"grad_norm": 2.6640563011169434,
+"learning_rate": 2e-05,
+"loss": 0.3555,
+"step": 66
+},
+{
+"epoch": 0.68,
+"grad_norm": 0.3148294985294342,
+"learning_rate": 2e-05,
+"loss": 0.0494,
+"step": 68
+},
+{
+"epoch": 0.7,
+"grad_norm": 3.636918067932129,
+"learning_rate": 2e-05,
+"loss": 0.1895,
+"step": 70
+},
+{
+"epoch": 0.72,
+"grad_norm": 4.029793739318848,
+"learning_rate": 2e-05,
+"loss": 0.2573,
+"step": 72
+},
+{
+"epoch": 0.74,
+"grad_norm": 0.7756206393241882,
+"learning_rate": 2e-05,
+"loss": 0.2483,
+"step": 74
+},
+{
+"epoch": 0.76,
+"grad_norm": 4.137673377990723,
+"learning_rate": 2e-05,
+"loss": 0.2288,
+"step": 76
+},
+{
+"epoch": 0.78,
+"grad_norm": 2.653560161590576,
+"learning_rate": 2e-05,
+"loss": 0.1887,
+"step": 78
+},
+{
+"epoch": 0.8,
+"grad_norm": 6.49356746673584,
+"learning_rate": 2e-05,
+"loss": 0.4987,
+"step": 80
+},
+{
+"epoch": 0.82,
+"grad_norm": 6.035229682922363,
+"learning_rate": 2e-05,
+"loss": 0.2783,
+"step": 82
+},
+{
+"epoch": 0.84,
+"grad_norm": 9.805408477783203,
+"learning_rate": 2e-05,
+"loss": 0.3604,
+"step": 84
+},
+{
+"epoch": 0.86,
+"grad_norm": 2.5182745456695557,
+"learning_rate": 2e-05,
+"loss": 0.1099,
+"step": 86
+},
+{
+"epoch": 0.88,
+"grad_norm": 4.8968892097473145,
+"learning_rate": 2e-05,
+"loss": 0.4873,
+"step": 88
+},
+{
+"epoch": 0.9,
+"grad_norm": 2.410318374633789,
+"learning_rate": 2e-05,
+"loss": 0.0804,
+"step": 90
+},
+{
+"epoch": 0.92,
+"grad_norm": 3.7177977561950684,
+"learning_rate": 2e-05,
+"loss": 0.3599,
+"step": 92
+},
+{
+"epoch": 0.94,
+"grad_norm": 1.3571261167526245,
+"learning_rate": 2e-05,
+"loss": 0.1086,
+"step": 94
+},
+{
+"epoch": 0.96,
+"grad_norm": 1.055740475654602,
+"learning_rate": 2e-05,
+"loss": 0.2812,
+"step": 96
+},
+{
+"epoch": 0.98,
+"grad_norm": 5.602509498596191,
+"learning_rate": 2e-05,
+"loss": 0.1919,
+"step": 98
+},
+{
+"epoch": 1.0,
+"grad_norm": 1.4338890314102173,
+"learning_rate": 2e-05,
+"loss": 0.1665,
+"step": 100
+},
+{
+"epoch": 1.0,
+"step": 100,
+"total_flos": 2.665628858266419e+16,
+"train_loss": 0.2277008819580078,
+"train_runtime": 433.6026,
+"train_samples_per_second": 0.923,
+"train_steps_per_second": 0.231
+}
+],
+"logging_steps": 2,
+"max_steps": 100,
+"num_input_tokens_seen": 0,
+"num_train_epochs": 1,
+"save_steps": 500,
+"stateful_callbacks": {
+"TrainerControl": {
+"args": {
+"should_epoch_stop": false,
+"should_evaluate": false,
+"should_log": false,
+"should_save": false,
+"should_training_stop": false
+},
+"attributes": {}
+}
+},
+"total_flos": 2.665628858266419e+16,
+"train_batch_size": 4,
+"trial_name": null,
+"trial_params": null
+}
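A quick consistency check on the state above: with logging_steps = 2 and max_steps = 100 there are 50 logged loss values, and their mean reproduces the recorded train_loss (0.2277008819580078 for this client) up to the 4-decimal rounding of the logged entries. A short sketch, reusing the same hypothetical local path as in the earlier example:

import json
from statistics import mean

with open("client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/4_trainer_state.json") as f:
    state = json.load(f)

logged = [e["loss"] for e in state["log_history"] if "loss" in e]
print(len(logged), round(mean(logged), 6), state["log_history"][-1]["train_loss"])
# Expect 50 entries; the two loss figures should agree to roughly 1e-4.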
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/5_client_model_round10.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:73c55ae4da4a9eb5ba0b74a710c4c1c538dade95131d4a7f43aa44f9a7150680
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/5_client_model_round12.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4eae3d7d81ae40241aa37d662628362786ee3bc318cdabd4056f28ec4a7325fe
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/5_client_model_round15.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a813877ea30a92cea3df1dd7dbb8e79a464957ec3606ba43c7cb03e9cda613fe
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/5_client_model_round17.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:773d23a64f72f59916f4e934e34309aef4352c3c691c1155a420f14d8248d7f6
+size 389170582
client_states_fedavg_bs4_saveoptim_lr2e-5_sc135_4tasks_5rounds_fixitr100_T0125_decay099/5_client_model_round2.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9c29d0e9bd3f3eac817566ff565c02b48193ff1a6f317b541b7b307a3fd8309c
+size 389170122