lodestones committed on
Commit
461895e
·
verified ·
1 Parent(s): 2684334

Upload folder using huggingface_hub

Browse files
priming_e2/2025-01-25_23-35-59.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a512414cceeca05b53ed1ae6c3c167fddc9eaa6fbf435eb88ef922fd9cc01f27
3
+ size 17800230648
priming_e2/2025-01-26_00-46-46.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fee66697580f1be8cc8151fc51b5960cc5121f6c84621d7bfe109ff5f85ed0ac
3
+ size 17800230648
priming_e2/training_config.json CHANGED
@@ -4,14 +4,14 @@
4
  "cache_minibatch": 8,
5
  "train_minibatch": 4,
6
  "offload_param_count": 17000000000,
7
- "lr": 1e-05,
8
  "weight_decay": 0.0,
9
  "warmup_steps": 5,
10
  "change_layer_every": 100000000000,
11
  "trained_single_blocks": 38,
12
  "trained_double_blocks": 19,
13
  "save_every": 50,
14
- "save_folder": "model_priming",
15
  "wandb_project": "optimal transport unlocked",
16
  "wandb_run": "priming",
17
  "wandb_entity": null
 
4
  "cache_minibatch": 8,
5
  "train_minibatch": 4,
6
  "offload_param_count": 17000000000,
7
+ "lr": 2e-06,
8
  "weight_decay": 0.0,
9
  "warmup_steps": 5,
10
  "change_layer_every": 100000000000,
11
  "trained_single_blocks": 38,
12
  "trained_double_blocks": 19,
13
  "save_every": 50,
14
+ "save_folder": "model_priming_low_lr",
15
  "wandb_project": "optimal transport unlocked",
16
  "wandb_run": "priming",
17
  "wandb_entity": null