Saving weights and logs of step 1000
events.out.tfevents.1635874712.t1v-n-f6f5b6cc-w-0.906011.0.v2 → events.out.tfevents.1635877753.t1v-n-f6f5b6cc-w-0.912674.0.v2
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:95f27c4c3af3da4607838029ca94c7ecc087d9a100294e7a744433c72e8e97f1
+size 147207
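The renamed file is a TensorBoard event log written during the Flax training run; like the model weights, it is tracked with Git LFS, so only the pointer above is stored in git. A minimal sketch of how one could inspect the logs locally, assuming the event file sits in the repository root (the actual log-directory layout is not part of this commit):

# Sketch only: fetch the LFS-tracked event log and open it in TensorBoard.
# The "." logdir is an assumption about where the event file lives.
git lfs pull --include="*.tfevents.*"
pip install tensorboard
tensorboard --logdir . --port 6006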
flax_model.msgpack
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:9b32778e6c2b848bdc5d724eb1774483f64902c76609aa8ca75cfc9157dc711b
 size 497764120
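Only the LFS pointer (oid and size) changes in git; the checkpoint blob itself lives in LFS storage. A minimal sketch of how one could fetch the updated flax_model.msgpack and check it against the pointer above; nothing here beyond the file name, oid, and size comes from this commit:

# Sketch only: download the LFS object and verify it against the pointer.
git lfs pull --include="flax_model.msgpack"
# The digest should match the oid recorded above
# (9b32778e...dc711b) and the size should be 497764120 bytes.
sha256sum flax_model.msgpack
stat -c %s flax_model.msgpack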
run.sh
CHANGED
@@ -11,11 +11,11 @@ python run_clm_flax.py \
     --block_size="512" \
     --per_device_train_batch_size="8" \
     --per_device_eval_batch_size="8" \
-    --learning_rate="
-    --warmup_steps="
+    --learning_rate="3e-5" \
+    --warmup_steps="500" \
     --adam_beta1="0.9" --adam_beta2="0.98" --weight_decay="0.01" \
     --overwrite_output_dir \
-    --num_train_epochs="
+    --num_train_epochs="10" \
     --logging_steps="500" \
     --save_steps="1000" \
     --eval_steps="1000" \
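The diff touches only the hyperparameter lines of run.sh: the learning rate is now 3e-5, warmup runs for 500 steps, and training lasts 10 epochs. For context, the updated hunk corresponds to an invocation roughly like the sketch below; every flag marked <...> sits outside the shown hunk and is a placeholder assumption, not taken from this commit:

# Sketch of the updated run.sh invocation. Lines with <...> are placeholders
# for flags not visible in the diff and are NOT the actual run.sh contents.
python run_clm_flax.py \
    --output_dir="<output_dir>" \
    --model_type="<model_type>" \
    --tokenizer_name="<tokenizer>" \
    --dataset_name="<dataset>" \
    --do_train --do_eval \
    --block_size="512" \
    --per_device_train_batch_size="8" \
    --per_device_eval_batch_size="8" \
    --learning_rate="3e-5" \
    --warmup_steps="500" \
    --adam_beta1="0.9" --adam_beta2="0.98" --weight_decay="0.01" \
    --overwrite_output_dir \
    --num_train_epochs="10" \
    --logging_steps="500" \
    --save_steps="1000" \
    --eval_steps="1000"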